sha | text | id | tags | created_at | metadata | last_modified |
---|---|---|---|---|---|---|
7856f35a21912301d5d5e610a095968a80450d3a | | Siddheshwari19/Seattle_100_SVIs | ["region:us"] | 2024-02-16T23:01:12+00:00 | {} | 2024-02-16T23:03:09+00:00 |
f1d7280d53820a24cec367bd75767839c3ff41a9 | | wakamex/github | ["region:us"] | 2024-02-16T23:08:27+00:00 | {} | 2024-02-17T17:10:54+00:00 |
5761ff8ccc22c986b285bf39701e438d5c1ad025 | | ktrinh38/hollister | ["region:us"] | 2024-02-16T23:08:57+00:00 | {"dataset_info": {"features": [{"name": "folder", "dtype": "string"}, {"name": "path", "dtype": "string"}, {"name": "image", "dtype": "image"}], "splits": [{"name": "train", "num_bytes": 14066393902.869, "num_examples": 1791}], "download_size": 14099936711, "dataset_size": 14066393902.869}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T23:25:14+00:00 |
a2ad01252be51ab3945b73327d73d1d3b5fed297 | | nccratliri/whisperseg-conda-env | ["region:us"] | 2024-02-16T23:11:27+00:00 | {} | 2024-02-16T23:14:52+00:00 |
63859932e52e82d63cc35d96fa0a39dde229c714 |
# Dataset Card for Evaluation run of fzzhang/Marcoroni-neural-chat-7B-v2_gsm8k_merged_s
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [fzzhang/Marcoroni-neural-chat-7B-v2_gsm8k_merged_s](https://huggingface.co/fzzhang/Marcoroni-neural-chat-7B-v2_gsm8k_merged_s) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can, for instance, do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_fzzhang__Marcoroni-neural-chat-7B-v2_gsm8k_merged_s",
"harness_winogrande_5",
split="train")
```
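You can also inspect the available configurations and splits programmatically. The following is a minimal sketch using the standard `datasets` utilities (the exact output depends on the current state of the repository):
```python
from datasets import get_dataset_config_names, get_dataset_split_names

repo = "open-llm-leaderboard/details_fzzhang__Marcoroni-neural-chat-7B-v2_gsm8k_merged_s"

# One configuration per evaluated task, plus the aggregated "results" configuration.
configs = get_dataset_config_names(repo)
print(f"{len(configs)} configurations, e.g. {configs[:5]}")

# Each configuration exposes one split per run (named by its timestamp) plus "latest".
print(get_dataset_split_names(repo, "harness_winogrande_5"))
```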
## Latest results
These are the [latest results from run 2024-02-16T23:09:18.709191](https://huggingface.co/datasets/open-llm-leaderboard/details_fzzhang__Marcoroni-neural-chat-7B-v2_gsm8k_merged_s/blob/main/results_2024-02-16T23-09-18.709191.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each of them in the results and the "latest" split of each eval):
```python
{
"all": {
"acc": 0.6322971394538486,
"acc_stderr": 0.03234149565305396,
"acc_norm": 0.63180337036129,
"acc_norm_stderr": 0.03300828288156676,
"mc1": 0.4675642594859241,
"mc1_stderr": 0.017466632149577613,
"mc2": 0.6329203738044532,
"mc2_stderr": 0.01541374646266871
},
"harness|arc:challenge|25": {
"acc": 0.6527303754266212,
"acc_stderr": 0.013913034529620444,
"acc_norm": 0.6715017064846417,
"acc_norm_stderr": 0.013724978465537302
},
"harness|hellaswag|10": {
"acc": 0.6754630551682932,
"acc_stderr": 0.004672447046820005,
"acc_norm": 0.8568014339772954,
"acc_norm_stderr": 0.003495593662520757
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.26,
"acc_stderr": 0.0440844002276808,
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6074074074074074,
"acc_stderr": 0.0421850621536888,
"acc_norm": 0.6074074074074074,
"acc_norm_stderr": 0.0421850621536888
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6710526315789473,
"acc_stderr": 0.03823428969926605,
"acc_norm": 0.6710526315789473,
"acc_norm_stderr": 0.03823428969926605
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.6,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.6,
"acc_norm_stderr": 0.04923659639173309
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.7132075471698113,
"acc_stderr": 0.027834912527544074,
"acc_norm": 0.7132075471698113,
"acc_norm_stderr": 0.027834912527544074
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7222222222222222,
"acc_stderr": 0.037455547914624555,
"acc_norm": 0.7222222222222222,
"acc_norm_stderr": 0.037455547914624555
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.5,
"acc_stderr": 0.050251890762960605,
"acc_norm": 0.5,
"acc_norm_stderr": 0.050251890762960605
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.53,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.53,
"acc_norm_stderr": 0.050161355804659205
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6589595375722543,
"acc_stderr": 0.03614665424180826,
"acc_norm": 0.6589595375722543,
"acc_norm_stderr": 0.03614665424180826
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.39215686274509803,
"acc_stderr": 0.048580835742663454,
"acc_norm": 0.39215686274509803,
"acc_norm_stderr": 0.048580835742663454
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.76,
"acc_stderr": 0.04292346959909283,
"acc_norm": 0.76,
"acc_norm_stderr": 0.04292346959909283
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5574468085106383,
"acc_stderr": 0.03246956919789958,
"acc_norm": 0.5574468085106383,
"acc_norm_stderr": 0.03246956919789958
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.49122807017543857,
"acc_stderr": 0.04702880432049615,
"acc_norm": 0.49122807017543857,
"acc_norm_stderr": 0.04702880432049615
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5241379310344828,
"acc_stderr": 0.0416180850350153,
"acc_norm": 0.5241379310344828,
"acc_norm_stderr": 0.0416180850350153
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.3888888888888889,
"acc_stderr": 0.025107425481137282,
"acc_norm": 0.3888888888888889,
"acc_norm_stderr": 0.025107425481137282
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.3888888888888889,
"acc_stderr": 0.04360314860077459,
"acc_norm": 0.3888888888888889,
"acc_norm_stderr": 0.04360314860077459
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252605,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252605
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7677419354838709,
"acc_stderr": 0.024022256130308235,
"acc_norm": 0.7677419354838709,
"acc_norm_stderr": 0.024022256130308235
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.458128078817734,
"acc_stderr": 0.03505630140785741,
"acc_norm": 0.458128078817734,
"acc_norm_stderr": 0.03505630140785741
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.65,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.65,
"acc_norm_stderr": 0.0479372485441102
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7515151515151515,
"acc_stderr": 0.033744026441394036,
"acc_norm": 0.7515151515151515,
"acc_norm_stderr": 0.033744026441394036
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7525252525252525,
"acc_stderr": 0.030746300742124484,
"acc_norm": 0.7525252525252525,
"acc_norm_stderr": 0.030746300742124484
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8808290155440415,
"acc_stderr": 0.023381935348121427,
"acc_norm": 0.8808290155440415,
"acc_norm_stderr": 0.023381935348121427
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6512820512820513,
"acc_stderr": 0.02416278028401772,
"acc_norm": 0.6512820512820513,
"acc_norm_stderr": 0.02416278028401772
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.3074074074074074,
"acc_stderr": 0.028133252578815642,
"acc_norm": 0.3074074074074074,
"acc_norm_stderr": 0.028133252578815642
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6512605042016807,
"acc_stderr": 0.030956636328566548,
"acc_norm": 0.6512605042016807,
"acc_norm_stderr": 0.030956636328566548
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.2781456953642384,
"acc_stderr": 0.03658603262763743,
"acc_norm": 0.2781456953642384,
"acc_norm_stderr": 0.03658603262763743
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8311926605504587,
"acc_stderr": 0.016060056268530343,
"acc_norm": 0.8311926605504587,
"acc_norm_stderr": 0.016060056268530343
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5231481481481481,
"acc_stderr": 0.03406315360711507,
"acc_norm": 0.5231481481481481,
"acc_norm_stderr": 0.03406315360711507
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8088235294117647,
"acc_stderr": 0.027599174300640763,
"acc_norm": 0.8088235294117647,
"acc_norm_stderr": 0.027599174300640763
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7890295358649789,
"acc_stderr": 0.02655837250266192,
"acc_norm": 0.7890295358649789,
"acc_norm_stderr": 0.02655837250266192
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6905829596412556,
"acc_stderr": 0.03102441174057221,
"acc_norm": 0.6905829596412556,
"acc_norm_stderr": 0.03102441174057221
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7557251908396947,
"acc_stderr": 0.03768335959728743,
"acc_norm": 0.7557251908396947,
"acc_norm_stderr": 0.03768335959728743
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7933884297520661,
"acc_stderr": 0.03695980128098824,
"acc_norm": 0.7933884297520661,
"acc_norm_stderr": 0.03695980128098824
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7870370370370371,
"acc_stderr": 0.0395783547198098,
"acc_norm": 0.7870370370370371,
"acc_norm_stderr": 0.0395783547198098
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7300613496932515,
"acc_stderr": 0.03487825168497892,
"acc_norm": 0.7300613496932515,
"acc_norm_stderr": 0.03487825168497892
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.4375,
"acc_stderr": 0.04708567521880525,
"acc_norm": 0.4375,
"acc_norm_stderr": 0.04708567521880525
},
"harness|hendrycksTest-management|5": {
"acc": 0.7475728155339806,
"acc_stderr": 0.04301250399690878,
"acc_norm": 0.7475728155339806,
"acc_norm_stderr": 0.04301250399690878
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8931623931623932,
"acc_stderr": 0.020237149008990925,
"acc_norm": 0.8931623931623932,
"acc_norm_stderr": 0.020237149008990925
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.71,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.71,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8288633461047255,
"acc_stderr": 0.013468201614066302,
"acc_norm": 0.8288633461047255,
"acc_norm_stderr": 0.013468201614066302
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7225433526011561,
"acc_stderr": 0.02410571260775431,
"acc_norm": 0.7225433526011561,
"acc_norm_stderr": 0.02410571260775431
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.37206703910614525,
"acc_stderr": 0.016165847583563295,
"acc_norm": 0.37206703910614525,
"acc_norm_stderr": 0.016165847583563295
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.6928104575163399,
"acc_stderr": 0.026415601914388992,
"acc_norm": 0.6928104575163399,
"acc_norm_stderr": 0.026415601914388992
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7009646302250804,
"acc_stderr": 0.02600330111788514,
"acc_norm": 0.7009646302250804,
"acc_norm_stderr": 0.02600330111788514
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7129629629629629,
"acc_stderr": 0.02517104191530968,
"acc_norm": 0.7129629629629629,
"acc_norm_stderr": 0.02517104191530968
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.4716312056737589,
"acc_stderr": 0.02977945095730307,
"acc_norm": 0.4716312056737589,
"acc_norm_stderr": 0.02977945095730307
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.44589308996088656,
"acc_stderr": 0.012695244711379774,
"acc_norm": 0.44589308996088656,
"acc_norm_stderr": 0.012695244711379774
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6213235294117647,
"acc_stderr": 0.02946513363977613,
"acc_norm": 0.6213235294117647,
"acc_norm_stderr": 0.02946513363977613
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6584967320261438,
"acc_stderr": 0.019184639328092487,
"acc_norm": 0.6584967320261438,
"acc_norm_stderr": 0.019184639328092487
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6909090909090909,
"acc_stderr": 0.044262946482000985,
"acc_norm": 0.6909090909090909,
"acc_norm_stderr": 0.044262946482000985
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7346938775510204,
"acc_stderr": 0.028263889943784603,
"acc_norm": 0.7346938775510204,
"acc_norm_stderr": 0.028263889943784603
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8706467661691543,
"acc_stderr": 0.023729830881018515,
"acc_norm": 0.8706467661691543,
"acc_norm_stderr": 0.023729830881018515
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.83,
"acc_stderr": 0.0377525168068637,
"acc_norm": 0.83,
"acc_norm_stderr": 0.0377525168068637
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5180722891566265,
"acc_stderr": 0.03889951252827216,
"acc_norm": 0.5180722891566265,
"acc_norm_stderr": 0.03889951252827216
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8245614035087719,
"acc_stderr": 0.029170885500727665,
"acc_norm": 0.8245614035087719,
"acc_norm_stderr": 0.029170885500727665
},
"harness|truthfulqa:mc|0": {
"mc1": 0.4675642594859241,
"mc1_stderr": 0.017466632149577613,
"mc2": 0.6329203738044532,
"mc2_stderr": 0.01541374646266871
},
"harness|winogrande|5": {
"acc": 0.7955801104972375,
"acc_stderr": 0.011334090612597207
},
"harness|gsm8k|5": {
"acc": 0.6982562547384382,
"acc_stderr": 0.012643544762873358
}
}
```
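If you prefer to work with the raw results file linked above rather than the per-task parquet details, a minimal sketch along these lines should fetch and parse it (this assumes `huggingface_hub` is installed; the exact nesting of the JSON can differ between harness versions):
```python
import json
from huggingface_hub import hf_hub_download

# Download the results JSON referenced in the link above.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/details_fzzhang__Marcoroni-neural-chat-7B-v2_gsm8k_merged_s",
    filename="results_2024-02-16T23-09-18.709191.json",
    repo_type="dataset",
)
with open(path) as f:
    data = json.load(f)

# The block shown above corresponds to the per-task metrics; depending on the
# harness version they may sit under a top-level "results" key.
metrics = data.get("results", data)
print(metrics["all"])  # aggregate acc / acc_norm / mc1 / mc2 figures
```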
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | open-llm-leaderboard/details_fzzhang__Marcoroni-neural-chat-7B-v2_gsm8k_merged_s | ["region:us"
] | 2024-02-16T23:11:41+00:00 | {"pretty_name": "Evaluation run of fzzhang/Marcoroni-neural-chat-7B-v2_gsm8k_merged_s", "dataset_summary": "Dataset automatically created during the evaluation run of model [fzzhang/Marcoroni-neural-chat-7B-v2_gsm8k_merged_s](https://huggingface.co/fzzhang/Marcoroni-neural-chat-7B-v2_gsm8k_merged_s) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_fzzhang__Marcoroni-neural-chat-7B-v2_gsm8k_merged_s\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-16T23:09:18.709191](https://huggingface.co/datasets/open-llm-leaderboard/details_fzzhang__Marcoroni-neural-chat-7B-v2_gsm8k_merged_s/blob/main/results_2024-02-16T23-09-18.709191.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6322971394538486,\n \"acc_stderr\": 0.03234149565305396,\n \"acc_norm\": 0.63180337036129,\n \"acc_norm_stderr\": 0.03300828288156676,\n \"mc1\": 0.4675642594859241,\n \"mc1_stderr\": 0.017466632149577613,\n \"mc2\": 0.6329203738044532,\n \"mc2_stderr\": 0.01541374646266871\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6527303754266212,\n \"acc_stderr\": 0.013913034529620444,\n \"acc_norm\": 0.6715017064846417,\n \"acc_norm_stderr\": 0.013724978465537302\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6754630551682932,\n \"acc_stderr\": 0.004672447046820005,\n \"acc_norm\": 0.8568014339772954,\n \"acc_norm_stderr\": 0.003495593662520757\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6074074074074074,\n \"acc_stderr\": 0.0421850621536888,\n \"acc_norm\": 0.6074074074074074,\n \"acc_norm_stderr\": 0.0421850621536888\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6710526315789473,\n \"acc_stderr\": 0.03823428969926605,\n \"acc_norm\": 0.6710526315789473,\n \"acc_norm_stderr\": 0.03823428969926605\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7132075471698113,\n \"acc_stderr\": 0.027834912527544074,\n \"acc_norm\": 0.7132075471698113,\n \"acc_norm_stderr\": 0.027834912527544074\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.037455547914624555,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 
0.037455547914624555\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6589595375722543,\n \"acc_stderr\": 0.03614665424180826,\n \"acc_norm\": 0.6589595375722543,\n \"acc_norm_stderr\": 0.03614665424180826\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.39215686274509803,\n \"acc_stderr\": 0.048580835742663454,\n \"acc_norm\": 0.39215686274509803,\n \"acc_norm_stderr\": 0.048580835742663454\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5574468085106383,\n \"acc_stderr\": 0.03246956919789958,\n \"acc_norm\": 0.5574468085106383,\n \"acc_norm_stderr\": 0.03246956919789958\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5241379310344828,\n \"acc_stderr\": 0.0416180850350153,\n \"acc_norm\": 0.5241379310344828,\n \"acc_norm_stderr\": 0.0416180850350153\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3888888888888889,\n \"acc_stderr\": 0.025107425481137282,\n \"acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.025107425481137282\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3888888888888889,\n \"acc_stderr\": 0.04360314860077459,\n \"acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.04360314860077459\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7677419354838709,\n \"acc_stderr\": 0.024022256130308235,\n \"acc_norm\": 0.7677419354838709,\n \"acc_norm_stderr\": 0.024022256130308235\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.458128078817734,\n \"acc_stderr\": 0.03505630140785741,\n \"acc_norm\": 0.458128078817734,\n \"acc_norm_stderr\": 0.03505630140785741\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7515151515151515,\n \"acc_stderr\": 0.033744026441394036,\n \"acc_norm\": 0.7515151515151515,\n \"acc_norm_stderr\": 0.033744026441394036\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7525252525252525,\n \"acc_stderr\": 0.030746300742124484,\n \"acc_norm\": 0.7525252525252525,\n \"acc_norm_stderr\": 0.030746300742124484\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8808290155440415,\n \"acc_stderr\": 0.023381935348121427,\n \"acc_norm\": 
0.8808290155440415,\n \"acc_norm_stderr\": 0.023381935348121427\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6512820512820513,\n \"acc_stderr\": 0.02416278028401772,\n \"acc_norm\": 0.6512820512820513,\n \"acc_norm_stderr\": 0.02416278028401772\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3074074074074074,\n \"acc_stderr\": 0.028133252578815642,\n \"acc_norm\": 0.3074074074074074,\n \"acc_norm_stderr\": 0.028133252578815642\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6512605042016807,\n \"acc_stderr\": 0.030956636328566548,\n \"acc_norm\": 0.6512605042016807,\n \"acc_norm_stderr\": 0.030956636328566548\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2781456953642384,\n \"acc_stderr\": 0.03658603262763743,\n \"acc_norm\": 0.2781456953642384,\n \"acc_norm_stderr\": 0.03658603262763743\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8311926605504587,\n \"acc_stderr\": 0.016060056268530343,\n \"acc_norm\": 0.8311926605504587,\n \"acc_norm_stderr\": 0.016060056268530343\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5231481481481481,\n \"acc_stderr\": 0.03406315360711507,\n \"acc_norm\": 0.5231481481481481,\n \"acc_norm_stderr\": 0.03406315360711507\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8088235294117647,\n \"acc_stderr\": 0.027599174300640763,\n \"acc_norm\": 0.8088235294117647,\n \"acc_norm_stderr\": 0.027599174300640763\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7890295358649789,\n \"acc_stderr\": 0.02655837250266192,\n \"acc_norm\": 0.7890295358649789,\n \"acc_norm_stderr\": 0.02655837250266192\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n \"acc_stderr\": 0.03102441174057221,\n \"acc_norm\": 0.6905829596412556,\n \"acc_norm_stderr\": 0.03102441174057221\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7557251908396947,\n \"acc_stderr\": 0.03768335959728743,\n \"acc_norm\": 0.7557251908396947,\n \"acc_norm_stderr\": 0.03768335959728743\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098824,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098824\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.0395783547198098,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.0395783547198098\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7300613496932515,\n \"acc_stderr\": 0.03487825168497892,\n \"acc_norm\": 0.7300613496932515,\n \"acc_norm_stderr\": 0.03487825168497892\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4375,\n \"acc_stderr\": 0.04708567521880525,\n \"acc_norm\": 0.4375,\n \"acc_norm_stderr\": 0.04708567521880525\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7475728155339806,\n \"acc_stderr\": 0.04301250399690878,\n \"acc_norm\": 0.7475728155339806,\n \"acc_norm_stderr\": 0.04301250399690878\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8931623931623932,\n \"acc_stderr\": 0.020237149008990925,\n \"acc_norm\": 0.8931623931623932,\n \"acc_norm_stderr\": 0.020237149008990925\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8288633461047255,\n \"acc_stderr\": 0.013468201614066302,\n \"acc_norm\": 0.8288633461047255,\n \"acc_norm_stderr\": 0.013468201614066302\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7225433526011561,\n \"acc_stderr\": 0.02410571260775431,\n \"acc_norm\": 0.7225433526011561,\n \"acc_norm_stderr\": 0.02410571260775431\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.37206703910614525,\n \"acc_stderr\": 0.016165847583563295,\n \"acc_norm\": 0.37206703910614525,\n \"acc_norm_stderr\": 0.016165847583563295\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6928104575163399,\n \"acc_stderr\": 0.026415601914388992,\n \"acc_norm\": 0.6928104575163399,\n \"acc_norm_stderr\": 0.026415601914388992\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7009646302250804,\n \"acc_stderr\": 0.02600330111788514,\n \"acc_norm\": 0.7009646302250804,\n \"acc_norm_stderr\": 0.02600330111788514\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7129629629629629,\n \"acc_stderr\": 0.02517104191530968,\n \"acc_norm\": 0.7129629629629629,\n \"acc_norm_stderr\": 0.02517104191530968\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4716312056737589,\n \"acc_stderr\": 0.02977945095730307,\n \"acc_norm\": 0.4716312056737589,\n \"acc_norm_stderr\": 0.02977945095730307\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.44589308996088656,\n \"acc_stderr\": 0.012695244711379774,\n \"acc_norm\": 0.44589308996088656,\n \"acc_norm_stderr\": 0.012695244711379774\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6213235294117647,\n \"acc_stderr\": 0.02946513363977613,\n \"acc_norm\": 0.6213235294117647,\n \"acc_norm_stderr\": 0.02946513363977613\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6584967320261438,\n \"acc_stderr\": 0.019184639328092487,\n \"acc_norm\": 0.6584967320261438,\n \"acc_norm_stderr\": 0.019184639328092487\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7346938775510204,\n \"acc_stderr\": 0.028263889943784603,\n \"acc_norm\": 0.7346938775510204,\n \"acc_norm_stderr\": 0.028263889943784603\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8706467661691543,\n \"acc_stderr\": 0.023729830881018515,\n \"acc_norm\": 0.8706467661691543,\n \"acc_norm_stderr\": 0.023729830881018515\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.83,\n \"acc_stderr\": 0.0377525168068637,\n \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.0377525168068637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5180722891566265,\n \"acc_stderr\": 0.03889951252827216,\n \"acc_norm\": 0.5180722891566265,\n \"acc_norm_stderr\": 0.03889951252827216\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727665,\n \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727665\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4675642594859241,\n \"mc1_stderr\": 0.017466632149577613,\n \"mc2\": 0.6329203738044532,\n \"mc2_stderr\": 0.01541374646266871\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7955801104972375,\n \"acc_stderr\": 0.011334090612597207\n },\n \"harness|gsm8k|5\": {\n \"acc\": 
0.6982562547384382,\n \"acc_stderr\": 0.012643544762873358\n }\n}\n```", "repo_url": "https://huggingface.co/fzzhang/Marcoroni-neural-chat-7B-v2_gsm8k_merged_s", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "clementine@hf.co", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|arc:challenge|25_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|gsm8k|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hellaswag|10_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T23-09-18.709191.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-16T23-09-18.709191.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T23-09-18.709191.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-16T23-09-18.709191.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-16T23-09-18.709191.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["**/details_harness|winogrande|5_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-02-16T23-09-18.709191.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_16T23_09_18.709191", "path": ["results_2024-02-16T23-09-18.709191.parquet"]}, {"split": "latest", "path": ["results_2024-02-16T23-09-18.709191.parquet"]}]}]} | 2024-02-16T23:12:03+00:00 |
ccb2edd3b10bdba2debc8eee16836a715347b565 | cameronfyfe/example-manifest-1 | [
"region:us"
] | 2024-02-16T23:15:14+00:00 | {} | 2024-02-16T23:15:14+00:00 |
|
528885a1dc9c49448cd15af9c2798e190f1dc200 | vishruthnath/Calc-asdiv-Tagged | [
"region:us"
] | 2024-02-16T23:24:34+00:00 | {"dataset_info": {"features": [{"name": "chain", "dtype": "string"}, {"name": "equation", "dtype": "string"}, {"name": "grade", "dtype": "int64"}, {"name": "id", "dtype": "string"}, {"name": "num_unique_ops", "dtype": "int64"}, {"name": "operand", "sequence": "float64"}, {"name": "operand_tags", "sequence": "int64"}, {"name": "operation", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "question_split", "sequence": "string"}, {"name": "result", "dtype": "string"}, {"name": "result_float", "dtype": "float64"}, {"name": "result_unit", "dtype": "string"}, {"name": "source_question", "dtype": "string"}, {"name": "valid", "dtype": "bool"}, {"name": "__index_level_0__", "dtype": "int64"}], "splits": [{"name": "test", "num_bytes": 770772, "num_examples": 859}], "download_size": 202166, "dataset_size": 770772}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}]}]} | 2024-02-17T00:20:12+00:00 |
|
56e84045140d9331146cbdccf7d2dbabfa033b57 | rocioadlc/data4 | [
"license:apache-2.0",
"region:us"
] | 2024-02-16T23:29:39+00:00 | {"license": "apache-2.0"} | 2024-02-17T01:35:07+00:00 |
|
56994ba4b6b3a96dd328c64f4b3f4aa12b47a8a7 | TheGreatP/vozjoaoV15 | [
"license:openrail",
"region:us"
] | 2024-02-16T23:33:51+00:00 | {"license": "openrail"} | 2024-02-16T23:42:46+00:00 |
|
e30a1c8e035abdbb5dd766350bba13bfbb692bf6 |
# Test
This dataset is a test. | dwancin/test | [
"license:mit",
"region:us"
] | 2024-02-16T23:34:14+00:00 | {"license": "mit"} | 2024-02-16T23:36:02+00:00 |
6634d086a687d651240581166b4668e4f50a6e07 | zekeZZ/hh-rlhf-dpo | [
"region:us"
] | 2024-02-16T23:34:16+00:00 | {"dataset_info": {"features": [{"name": "chosen", "dtype": "string"}, {"name": "rejected", "dtype": "string"}, {"name": "prompt", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 38071901, "num_examples": 49391}, {"name": "test", "num_bytes": 989565, "num_examples": 2466}], "download_size": 24318977, "dataset_size": 39061466}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2024-02-17T04:00:44+00:00 |
|
af457ab1d2ecfe1affba047e985a28896c6f6d0c |
# ms_marco_japanese
- Japanese translation data of [ms_marco](https://huggingface.co/datasets/ms_marco).
- The translation was produced with [google/madlad400-3b-mt](https://huggingface.co/google/madlad400-3b-mt).
- The data is stored in the same structure as the ms_marco dataset published on HuggingFace.
- The translation quality is not especially high, and some records contain traditional Chinese characters and similar artifacts. [mMARCO](https://github.com/unicamp-dl/mMARCO), a multilingual ms_marco dataset translated with the Google Translate API, has higher quality, so we recommend comparing this dataset against other translated datasets before relying on it.
- The wellFormedAnswers column has not been translated.
- For speed, [santhosh/madlad400-3b-ct2](https://huggingface.co/santhosh/madlad400-3b-ct2) was used for the translation; processing the roughly 10 million target sentences took about 8 days on an RTX 3090 (a rough sketch of this kind of pipeline is shown below).
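A minimal sketch of this kind of CTranslate2 + SentencePiece translation loop is shown below; the model directory, file names, and decoding parameters are assumptions for illustration, not the exact script used to build this dataset.
```
import ctranslate2
import sentencepiece as spm

# Assumed local paths: the CTranslate2 conversion of MADLAD-400 3B must be
# downloaded beforehand (e.g. from https://huggingface.co/santhosh/madlad400-3b-ct2).
translator = ctranslate2.Translator("madlad400-3b-ct2", device="cuda")
sp = spm.SentencePieceProcessor(model_file="madlad400-3b-ct2/sentencepiece.model")

def translate_to_japanese(text: str) -> str:
    # MADLAD-400 expects the target-language tag (<2ja> for Japanese) as a prefix.
    tokens = sp.encode(f"<2ja> {text}", out_type=str)
    result = translator.translate_batch([tokens], beam_size=1)
    return sp.decode_pieces(result[0].hypotheses[0])

print(translate_to_japanese("What was the immediate impact of the success of the Manhattan Project?"))
```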
## Usage
```
from datasets import load_dataset
train_ds = load_dataset("hotchpotch/ms_marco_japanese", "v2.1-madlad400-3b", split="train")
validation_ds = load_dataset("hotchpotch/ms_marco_japanese", "v2.1-madlad400-3b", split="validation")
test_ds = load_dataset("hotchpotch/ms_marco_japanese", "v2.1-madlad400-3b", split="test")
```
```
print(train_ds[0])
{'answers': ['マンハッタン計画の成功が直接的にもたらした影響は、原子力研究者や技術員達による素晴しい業績を覆い隠す唯一な雲であった。その成果と真実であるもの:何十万という無辜なる命々があきれていたことだろうか?'], 'passages': {'is_selected': [1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 'passage_text': ['科学者の間でコミュニケーションが行われることは、マンハッタン計画を成功させるために重要であった。原子力研究家や技術員たちによって達成された素晴らしい業績には雲だけがあふれているものだろうか?その実際的な意味と言えば何十万という無辜なる人々へ生命も犠牲になっていることですね!', 'マンハッタン計画とその原子爆弾は第二次世界大戦の終結に寄与し、平和的な目標をもって核エネルギーが利用されたことで歴史や科学界には影響力があった。', 'マンハッタン計画は原子爆弾の製造が可能かどうかなんて見るために始められた。このプロジェクトを成功させれば、世界には永遠な変化がありそこまで強力で人工的であることも知らしむことになっただろいますからね.', 'マンハッタン計画(Manhattan Project)は、第二次世界大戦中にアメリカ合衆国で行われた原子爆弾開発プロジェクトの名称。特別には1942年から翌日までレスリー・R. グローブズ将軍が指揮する米陸军工兵隊によって実施されたものをいうことが多かったのである 。', 'また、各巻のバージョンと補完的なウェブサイトもある。最初に作られたのは『マンハッタン計画: インタラクティヴ・ヒストリー』であり([http://www.cfo-doe/me70_history)歴史遺産資源局および国家核安全保障庁によるものだったが現在では全て廃止されています(https//en](http://www.cfo-doe/me70_history)%E6%AD%B4%E5%8F%B2%E9%81%BA%E7%94%A3%E8%B3%87%E6%BA%90%E5%B1%80%E3%81%8A%E3%82%88%E3%81%B3%E5%9B%BD%E5%AE%B6%E6%A0%B8%E5%AE%89%E5%85%A8%E4%BF%9D%E9%9A%9C%E5%BA%81%E3%81%AB%E3%82%88%E3%82%8B%E3%82%82%E3%81%AE%E3%81%A0%E3%81%A3%E3%81%9F%E3%81%8C%E7%8F%BE%E5%9C%A8%E3%81%A7%E3%81%AF%E5%85%A8%E3%81%A6%E5%BB%83%E6%AD%A2%E3%81%95%E3%82%8C%E3%81%A6%E3%81%84%E3%81%BE%E3%81%99(https//en))', '原子爆弾は、1945年7月にニューメキシコ州の砂漠で初めて実験的な核兵器として使用された。その後も多くが開発され続けたものだったのである(マンハッタン計画)。', 'また、原爆や第二次世界大戦の終結に関する非常によく豊富な文献を置き換える試みもない。本コレクションはマンハッタン計画について起源と発展が記録されることには努めていませんのである 。', 'マンハッタン計画(Manhattan Project)は、第二次世界大戦中に最初の核兵器を生産した研究開発事業である。イギリスとカナダによる支援下アメリカ合衆国が主導していたものだった 。1942年から1946年代までこのプロジェクトには米陸軍工廠少将レスリー・グローブス (Leslie Groves) (英語版 )(en:Lesley G.Grove, US Army Corp of Engineer), ロサンゼル斯原子力実験場所長ロバート·オペンハーマーらも参加しており,その間爆弾設計者として活躍していることでも知られていたのであり ,また彼等自身について言及する必要性があると考えている人物であることなどよりこれ以上詳細な情報ではないかという意見がありました', '1942年6月、アメリカ陸軍工兵隊はマンハッタン計画を開始した。原子爆弾の秘密名称であるが.', 'マンハッタン計画のB炉がハンフォードに建設される理由は、北アメリカ沿岸から太平洋へ流れ込む最大級河川であるコロンビア湖と近いことだった。'], 'url': ['[http://www.pitt.edu/~sdb14/atombomb.html](http://www.pitt.edu/~sdb14/atombomb.html)', '[http://www.osti.gov/accomplishments/manhattan_story.html](http://www.osti.gov/accomplishments/manhattan_story.html)', '[http://www.123helpme.com/impact-of-the-manhattan-project-preview.asp?id=177337](http://www.123helpme.com/impact-of-the-manhattan-project-preview.asp?id=177337)', '[http://www.answers.com/Q/How_did_the_Manhattan_Project_impact_on_society](http://www.answers.com/Q/How_did_the_Manhattan_Project_impact_on_society)', '[https://www.osti.gov/manhattan-project-history/publications/Manhattan_Project_2010.pdf](https://www.osti.gov/manhattan-project-history/publications/Manhattan_Project_2010.pdf)', '[http://www.ushistory.org/us/51f.asp](http://www.ushistory.org/us/51f.asp)', '[http://nsarchive.gwu.edu/NSAEBB/NSAEBB162](http://nsarchive.gwu.edu/NSAEBB/NSAEBB162)', '[https://en.wikipedia.org/wiki/Manhattan_Project](https://en.wikipedia.org/wiki/Manhattan_Project)', '[https://quizlet.com/41456230/a-bomb-flash-cards/](https://quizlet.com/41456230/a-bomb-flash-cards/)', '[https://www.atomicheritage.org/history/environmental-consequences](https://www.atomicheritage.org/history/environmental-consequences)']}, 'query': '(マンハッタン計画の成功が直接的にもたらした影響は何でしょうか。', 'query_id': 1185869, 'query_type': 'DESCRIPTION', 'wellFormedAnswers': []}
```
## License
- Same as ms_marco. | hotchpotch/ms_marco_japanese | [
"license:other",
"region:us"
] | 2024-02-16T23:37:22+00:00 | {"license": "other", "license_name": "same-ms-marco", "license_link": "https://huggingface.co/datasets/ms_marco", "dataset_info": {"config_name": "v2.1-madlad400-3b", "features": [{"name": "answers", "sequence": "string"}, {"name": "passages", "sequence": [{"name": "is_selected", "dtype": "int32"}, {"name": "passage_text", "dtype": "string"}, {"name": "url", "dtype": "string"}]}, {"name": "query", "dtype": "string"}, {"name": "query_id", "dtype": "int32"}, {"name": "query_type", "dtype": "string"}, {"name": "wellFormedAnswers", "sequence": "string"}], "splits": [{"name": "validation", "num_bytes": 440690468, "num_examples": 101093}, {"name": "train", "num_bytes": 3590508080, "num_examples": 808731}, {"name": "test", "num_bytes": 430765349, "num_examples": 101092}], "download_size": 2491144245, "dataset_size": 4461963897}, "configs": [{"config_name": "v2.1-madlad400-3b", "data_files": [{"split": "validation", "path": "v2.1-madlad400-3b/validation-*"}, {"split": "train", "path": "v2.1-madlad400-3b/train-*"}, {"split": "test", "path": "v2.1-madlad400-3b/test-*"}]}]} | 2024-02-17T01:00:50+00:00 |
14d6dbb3c2af6cf28b7a5edb0ff902dde2c64ea3 | ramixpe/bgp_questions_v3 | [
"region:us"
] | 2024-02-16T23:41:21+00:00 | {"dataset_info": {"features": [{"name": "instruction", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1209974, "num_examples": 692}], "download_size": 499547, "dataset_size": 1209974}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-16T23:41:22+00:00 |
|
396bb592ef23ae10b89bbf68e13ef9a8d4532d29 | plutokokoa/translation-for-yu-gi-oh-ja-traditional-zh | [
"license:apache-2.0",
"region:us"
] | 2024-02-16T23:44:15+00:00 | {"license": "apache-2.0", "dataset_info": {"features": [{"name": "jp", "dtype": "string"}, {"name": "ch", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 7474238, "num_examples": 10536}], "download_size": 2293121, "dataset_size": 7474238}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T00:19:51+00:00 |
|
2b931cd2fed354dda0e6c87ab4baa50ec3fa50d3 | MatsuoDochiai/LUISA | [
"license:openrail",
"region:us"
] | 2024-02-16T23:48:01+00:00 | {"license": "openrail"} | 2024-02-16T23:49:20+00:00 |
|
c9f8d895ed83d2994a6dd1545bcea830204f1887 | RICLDS1972/marlonloc | [
"license:openrail",
"region:us"
] | 2024-02-16T23:49:32+00:00 | {"license": "openrail"} | 2024-02-16T23:54:38+00:00 |
|
1385963f7a1218134dcb6617d4395e177fbb15ec | raiderkkkkkkkkk/sonicmoderno | [
"region:us"
] | 2024-02-17T00:02:20+00:00 | {} | 2024-02-17T00:05:15+00:00 |
|
9fea06f62992f0b1486311d02272955ef70657d2 | firstgradeai/ytrends2 | [
"region:us"
] | 2024-02-17T00:04:19+00:00 | {} | 2024-02-17T00:04:19+00:00 |
|
8ca036f320b37343d30e56dd9fe430bf86018d47 | firstgradeai/ytrends3 | [
"region:us"
] | 2024-02-17T00:04:53+00:00 | {} | 2024-02-17T00:14:00+00:00 |
|
52adddf20f7c93f898760a14abf5d5ae595b24ab | vishruthnath/Calc-mawps-Tagged-testing | [
"region:us"
] | 2024-02-17T00:14:57+00:00 | {"dataset_info": {"features": [{"name": "chain", "dtype": "string"}, {"name": "equation", "dtype": "string"}, {"name": "expression", "dtype": "string"}, {"name": "id", "dtype": "string"}, {"name": "num_unique_ops", "dtype": "int64"}, {"name": "operand", "sequence": "float64"}, {"name": "operand_tags", "sequence": "int64"}, {"name": "operation", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "question_split", "sequence": "string"}, {"name": "result", "dtype": "string"}, {"name": "result_float", "dtype": "float64"}, {"name": "valid", "dtype": "bool"}, {"name": "__index_level_0__", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 680417, "num_examples": 1039}, {"name": "validation", "num_bytes": 656088, "num_examples": 1010}, {"name": "test", "num_bytes": 330740, "num_examples": 505}], "download_size": 463338, "dataset_size": 1667245}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2024-02-17T00:15:23+00:00 |
|
b755c5fa9be8e6c668827a3419bee5330d04c5ba | vishruthnath/Calc-svamp-Tagged-testing | [
"region:us"
] | 2024-02-17T00:15:53+00:00 | {"dataset_info": {"features": [{"name": "chain", "dtype": "string"}, {"name": "equation", "dtype": "string"}, {"name": "id", "dtype": "string"}, {"name": "num_unique_ops", "dtype": "int64"}, {"name": "operand", "sequence": "float64"}, {"name": "operand_tags", "sequence": "int64"}, {"name": "operation", "dtype": "string"}, {"name": "problem_type", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "question_split", "sequence": "string"}, {"name": "result", "dtype": "string"}, {"name": "result_float", "dtype": "float64"}, {"name": "valid", "dtype": "bool"}, {"name": "__index_level_0__", "dtype": "int64"}], "splits": [{"name": "test", "num_bytes": 723143, "num_examples": 811}], "download_size": 156032, "dataset_size": 723143}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}]}]} | 2024-02-17T00:16:04+00:00 |
|
660ea158fa5d5507447ed695366b063f98dce6d3 |
# Dataset Card for Evaluation run of Yuma42/KangalKhan-Ruby-7B-Fixed
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [Yuma42/KangalKhan-Ruby-7B-Fixed](https://huggingface.co/Yuma42/KangalKhan-Ruby-7B-Fixed) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Yuma42__KangalKhan-Ruby-7B-Fixed",
"harness_winogrande_5",
split="train")
```
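The aggregated metrics can be loaded the same way; the following is a minimal sketch assuming the "results" configuration and its "latest" split described above:
```python
from datasets import load_dataset

# Load the aggregated "results" configuration; "latest" always points to the
# most recent evaluation run (illustrative sketch, not part of the original card).
results = load_dataset("open-llm-leaderboard/details_Yuma42__KangalKhan-Ruby-7B-Fixed",
                       "results",
                       split="latest")
print(results[0])  # a single record holding the aggregated metrics of the run
```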
## Latest results
These are the [latest results from run 2024-02-17T00:20:49.422540](https://huggingface.co/datasets/open-llm-leaderboard/details_Yuma42__KangalKhan-Ruby-7B-Fixed/blob/main/results_2024-02-17T00-20-49.422540.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6347473013166217,
"acc_stderr": 0.032259927653459516,
"acc_norm": 0.6365178163764577,
"acc_norm_stderr": 0.03290211517224385,
"mc1": 0.38922888616891066,
"mc1_stderr": 0.01706855268069033,
"mc2": 0.5648879973341684,
"mc2_stderr": 0.01540236564556069
},
"harness|arc:challenge|25": {
"acc": 0.6220136518771331,
"acc_stderr": 0.014169664520303098,
"acc_norm": 0.6723549488054608,
"acc_norm_stderr": 0.013715847940719337
},
"harness|hellaswag|10": {
"acc": 0.6683927504481179,
"acc_stderr": 0.004698285350019216,
"acc_norm": 0.8522206731726748,
"acc_norm_stderr": 0.00354155826377909
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.5851851851851851,
"acc_stderr": 0.04256193767901408,
"acc_norm": 0.5851851851851851,
"acc_norm_stderr": 0.04256193767901408
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6973684210526315,
"acc_stderr": 0.03738520676119669,
"acc_norm": 0.6973684210526315,
"acc_norm_stderr": 0.03738520676119669
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.59,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.59,
"acc_norm_stderr": 0.049431107042371025
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6867924528301886,
"acc_stderr": 0.028544793319055326,
"acc_norm": 0.6867924528301886,
"acc_norm_stderr": 0.028544793319055326
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.75,
"acc_stderr": 0.03621034121889507,
"acc_norm": 0.75,
"acc_norm_stderr": 0.03621034121889507
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620332
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.47,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.47,
"acc_norm_stderr": 0.050161355804659205
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.5953757225433526,
"acc_stderr": 0.03742461193887248,
"acc_norm": 0.5953757225433526,
"acc_norm_stderr": 0.03742461193887248
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.3627450980392157,
"acc_stderr": 0.04784060704105653,
"acc_norm": 0.3627450980392157,
"acc_norm_stderr": 0.04784060704105653
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.76,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.76,
"acc_norm_stderr": 0.042923469599092816
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5617021276595745,
"acc_stderr": 0.03243618636108101,
"acc_norm": 0.5617021276595745,
"acc_norm_stderr": 0.03243618636108101
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.4824561403508772,
"acc_stderr": 0.0470070803355104,
"acc_norm": 0.4824561403508772,
"acc_norm_stderr": 0.0470070803355104
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5241379310344828,
"acc_stderr": 0.0416180850350153,
"acc_norm": 0.5241379310344828,
"acc_norm_stderr": 0.0416180850350153
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.41005291005291006,
"acc_stderr": 0.025331202438944433,
"acc_norm": 0.41005291005291006,
"acc_norm_stderr": 0.025331202438944433
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.4603174603174603,
"acc_stderr": 0.04458029125470973,
"acc_norm": 0.4603174603174603,
"acc_norm_stderr": 0.04458029125470973
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695236,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695236
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7870967741935484,
"acc_stderr": 0.023287665127268552,
"acc_norm": 0.7870967741935484,
"acc_norm_stderr": 0.023287665127268552
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.5123152709359606,
"acc_stderr": 0.035169204442208966,
"acc_norm": 0.5123152709359606,
"acc_norm_stderr": 0.035169204442208966
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.65,
"acc_stderr": 0.04793724854411019,
"acc_norm": 0.65,
"acc_norm_stderr": 0.04793724854411019
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7818181818181819,
"acc_stderr": 0.03225078108306289,
"acc_norm": 0.7818181818181819,
"acc_norm_stderr": 0.03225078108306289
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7828282828282829,
"acc_stderr": 0.029376616484945633,
"acc_norm": 0.7828282828282829,
"acc_norm_stderr": 0.029376616484945633
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8860103626943006,
"acc_stderr": 0.022935144053919443,
"acc_norm": 0.8860103626943006,
"acc_norm_stderr": 0.022935144053919443
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6102564102564103,
"acc_stderr": 0.024726967886647074,
"acc_norm": 0.6102564102564103,
"acc_norm_stderr": 0.024726967886647074
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.3111111111111111,
"acc_stderr": 0.02822644674968352,
"acc_norm": 0.3111111111111111,
"acc_norm_stderr": 0.02822644674968352
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6848739495798319,
"acc_stderr": 0.030176808288974337,
"acc_norm": 0.6848739495798319,
"acc_norm_stderr": 0.030176808288974337
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.32450331125827814,
"acc_stderr": 0.03822746937658752,
"acc_norm": 0.32450331125827814,
"acc_norm_stderr": 0.03822746937658752
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8385321100917431,
"acc_stderr": 0.015776239256163227,
"acc_norm": 0.8385321100917431,
"acc_norm_stderr": 0.015776239256163227
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5,
"acc_stderr": 0.034099716973523674,
"acc_norm": 0.5,
"acc_norm_stderr": 0.034099716973523674
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.803921568627451,
"acc_stderr": 0.027865942286639318,
"acc_norm": 0.803921568627451,
"acc_norm_stderr": 0.027865942286639318
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.8059071729957806,
"acc_stderr": 0.025744902532290913,
"acc_norm": 0.8059071729957806,
"acc_norm_stderr": 0.025744902532290913
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.695067264573991,
"acc_stderr": 0.030898610882477515,
"acc_norm": 0.695067264573991,
"acc_norm_stderr": 0.030898610882477515
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7786259541984732,
"acc_stderr": 0.03641297081313728,
"acc_norm": 0.7786259541984732,
"acc_norm_stderr": 0.03641297081313728
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7603305785123967,
"acc_stderr": 0.03896878985070416,
"acc_norm": 0.7603305785123967,
"acc_norm_stderr": 0.03896878985070416
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7870370370370371,
"acc_stderr": 0.0395783547198098,
"acc_norm": 0.7870370370370371,
"acc_norm_stderr": 0.0395783547198098
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7852760736196319,
"acc_stderr": 0.032262193772867744,
"acc_norm": 0.7852760736196319,
"acc_norm_stderr": 0.032262193772867744
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.49107142857142855,
"acc_stderr": 0.04745033255489123,
"acc_norm": 0.49107142857142855,
"acc_norm_stderr": 0.04745033255489123
},
"harness|hendrycksTest-management|5": {
"acc": 0.7766990291262136,
"acc_stderr": 0.04123553189891431,
"acc_norm": 0.7766990291262136,
"acc_norm_stderr": 0.04123553189891431
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8589743589743589,
"acc_stderr": 0.022801382534597528,
"acc_norm": 0.8589743589743589,
"acc_norm_stderr": 0.022801382534597528
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.71,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.71,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8263090676883781,
"acc_stderr": 0.01354741565866226,
"acc_norm": 0.8263090676883781,
"acc_norm_stderr": 0.01354741565866226
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7138728323699421,
"acc_stderr": 0.02433214677913413,
"acc_norm": 0.7138728323699421,
"acc_norm_stderr": 0.02433214677913413
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.3307262569832402,
"acc_stderr": 0.01573502625896612,
"acc_norm": 0.3307262569832402,
"acc_norm_stderr": 0.01573502625896612
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7418300653594772,
"acc_stderr": 0.02505850331695814,
"acc_norm": 0.7418300653594772,
"acc_norm_stderr": 0.02505850331695814
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.6913183279742765,
"acc_stderr": 0.026236965881153266,
"acc_norm": 0.6913183279742765,
"acc_norm_stderr": 0.026236965881153266
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7438271604938271,
"acc_stderr": 0.024288533637726095,
"acc_norm": 0.7438271604938271,
"acc_norm_stderr": 0.024288533637726095
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.5,
"acc_stderr": 0.029827499313594685,
"acc_norm": 0.5,
"acc_norm_stderr": 0.029827499313594685
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.47327249022164275,
"acc_stderr": 0.012751977967676008,
"acc_norm": 0.47327249022164275,
"acc_norm_stderr": 0.012751977967676008
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6764705882352942,
"acc_stderr": 0.02841820861940676,
"acc_norm": 0.6764705882352942,
"acc_norm_stderr": 0.02841820861940676
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6748366013071896,
"acc_stderr": 0.01895088677080631,
"acc_norm": 0.6748366013071896,
"acc_norm_stderr": 0.01895088677080631
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6727272727272727,
"acc_stderr": 0.04494290866252091,
"acc_norm": 0.6727272727272727,
"acc_norm_stderr": 0.04494290866252091
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7387755102040816,
"acc_stderr": 0.028123429335142773,
"acc_norm": 0.7387755102040816,
"acc_norm_stderr": 0.028123429335142773
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8109452736318408,
"acc_stderr": 0.027686913588013003,
"acc_norm": 0.8109452736318408,
"acc_norm_stderr": 0.027686913588013003
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.85,
"acc_stderr": 0.0358870281282637,
"acc_norm": 0.85,
"acc_norm_stderr": 0.0358870281282637
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5662650602409639,
"acc_stderr": 0.03858158940685517,
"acc_norm": 0.5662650602409639,
"acc_norm_stderr": 0.03858158940685517
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8304093567251462,
"acc_stderr": 0.02878210810540171,
"acc_norm": 0.8304093567251462,
"acc_norm_stderr": 0.02878210810540171
},
"harness|truthfulqa:mc|0": {
"mc1": 0.38922888616891066,
"mc1_stderr": 0.01706855268069033,
"mc2": 0.5648879973341684,
"mc2_stderr": 0.01540236564556069
},
"harness|winogrande|5": {
"acc": 0.7797947908445146,
"acc_stderr": 0.011646276755089693
},
"harness|gsm8k|5": {
"acc": 0.6194086429112965,
"acc_stderr": 0.013373971277729818
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | open-llm-leaderboard/details_Yuma42__KangalKhan-Ruby-7B-Fixed | [
"region:us"
] | 2024-02-17T00:23:08+00:00 | {"pretty_name": "Evaluation run of Yuma42/KangalKhan-Ruby-7B-Fixed", "dataset_summary": "Dataset automatically created during the evaluation run of model [Yuma42/KangalKhan-Ruby-7B-Fixed](https://huggingface.co/Yuma42/KangalKhan-Ruby-7B-Fixed) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Yuma42__KangalKhan-Ruby-7B-Fixed\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-17T00:20:49.422540](https://huggingface.co/datasets/open-llm-leaderboard/details_Yuma42__KangalKhan-Ruby-7B-Fixed/blob/main/results_2024-02-17T00-20-49.422540.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6347473013166217,\n \"acc_stderr\": 0.032259927653459516,\n \"acc_norm\": 0.6365178163764577,\n \"acc_norm_stderr\": 0.03290211517224385,\n \"mc1\": 0.38922888616891066,\n \"mc1_stderr\": 0.01706855268069033,\n \"mc2\": 0.5648879973341684,\n \"mc2_stderr\": 0.01540236564556069\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6220136518771331,\n \"acc_stderr\": 0.014169664520303098,\n \"acc_norm\": 0.6723549488054608,\n \"acc_norm_stderr\": 0.013715847940719337\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6683927504481179,\n \"acc_stderr\": 0.004698285350019216,\n \"acc_norm\": 0.8522206731726748,\n \"acc_norm_stderr\": 0.00354155826377909\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5851851851851851,\n \"acc_stderr\": 0.04256193767901408,\n \"acc_norm\": 0.5851851851851851,\n \"acc_norm_stderr\": 0.04256193767901408\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6973684210526315,\n \"acc_stderr\": 0.03738520676119669,\n \"acc_norm\": 0.6973684210526315,\n \"acc_norm_stderr\": 0.03738520676119669\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6867924528301886,\n \"acc_stderr\": 0.028544793319055326,\n \"acc_norm\": 0.6867924528301886,\n \"acc_norm_stderr\": 0.028544793319055326\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.03621034121889507,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.03621034121889507\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 
0.05009082659620332,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5953757225433526,\n \"acc_stderr\": 0.03742461193887248,\n \"acc_norm\": 0.5953757225433526,\n \"acc_norm_stderr\": 0.03742461193887248\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3627450980392157,\n \"acc_stderr\": 0.04784060704105653,\n \"acc_norm\": 0.3627450980392157,\n \"acc_norm_stderr\": 0.04784060704105653\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5617021276595745,\n \"acc_stderr\": 0.03243618636108101,\n \"acc_norm\": 0.5617021276595745,\n \"acc_norm_stderr\": 0.03243618636108101\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n \"acc_stderr\": 0.0470070803355104,\n \"acc_norm\": 0.4824561403508772,\n \"acc_norm_stderr\": 0.0470070803355104\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5241379310344828,\n \"acc_stderr\": 0.0416180850350153,\n \"acc_norm\": 0.5241379310344828,\n \"acc_norm_stderr\": 0.0416180850350153\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41005291005291006,\n \"acc_stderr\": 0.025331202438944433,\n \"acc_norm\": 0.41005291005291006,\n \"acc_norm_stderr\": 0.025331202438944433\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7870967741935484,\n \"acc_stderr\": 0.023287665127268552,\n \"acc_norm\": 0.7870967741935484,\n \"acc_norm_stderr\": 0.023287665127268552\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5123152709359606,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.5123152709359606,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.04793724854411019,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.04793724854411019\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7828282828282829,\n \"acc_stderr\": 0.029376616484945633,\n \"acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.029376616484945633\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8860103626943006,\n \"acc_stderr\": 0.022935144053919443,\n \"acc_norm\": 0.8860103626943006,\n \"acc_norm_stderr\": 0.022935144053919443\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6102564102564103,\n \"acc_stderr\": 0.024726967886647074,\n \"acc_norm\": 0.6102564102564103,\n \"acc_norm_stderr\": 0.024726967886647074\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3111111111111111,\n \"acc_stderr\": 0.02822644674968352,\n \"acc_norm\": 0.3111111111111111,\n \"acc_norm_stderr\": 0.02822644674968352\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6848739495798319,\n \"acc_stderr\": 0.030176808288974337,\n \"acc_norm\": 0.6848739495798319,\n \"acc_norm_stderr\": 0.030176808288974337\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.32450331125827814,\n \"acc_stderr\": 0.03822746937658752,\n \"acc_norm\": 0.32450331125827814,\n \"acc_norm_stderr\": 0.03822746937658752\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8385321100917431,\n \"acc_stderr\": 0.015776239256163227,\n \"acc_norm\": 0.8385321100917431,\n \"acc_norm_stderr\": 0.015776239256163227\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.034099716973523674,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.034099716973523674\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.803921568627451,\n \"acc_stderr\": 0.027865942286639318,\n \"acc_norm\": 0.803921568627451,\n \"acc_norm_stderr\": 0.027865942286639318\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8059071729957806,\n \"acc_stderr\": 0.025744902532290913,\n \"acc_norm\": 0.8059071729957806,\n \"acc_norm_stderr\": 0.025744902532290913\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7786259541984732,\n \"acc_stderr\": 0.03641297081313728,\n \"acc_norm\": 0.7786259541984732,\n \"acc_norm_stderr\": 0.03641297081313728\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7603305785123967,\n \"acc_stderr\": 0.03896878985070416,\n \"acc_norm\": 0.7603305785123967,\n \"acc_norm_stderr\": 0.03896878985070416\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.0395783547198098,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.0395783547198098\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7852760736196319,\n \"acc_stderr\": 0.032262193772867744,\n \"acc_norm\": 0.7852760736196319,\n \"acc_norm_stderr\": 0.032262193772867744\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.49107142857142855,\n \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.49107142857142855,\n \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8589743589743589,\n \"acc_stderr\": 0.022801382534597528,\n \"acc_norm\": 0.8589743589743589,\n \"acc_norm_stderr\": 0.022801382534597528\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8263090676883781,\n 
\"acc_stderr\": 0.01354741565866226,\n \"acc_norm\": 0.8263090676883781,\n \"acc_norm_stderr\": 0.01354741565866226\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7138728323699421,\n \"acc_stderr\": 0.02433214677913413,\n \"acc_norm\": 0.7138728323699421,\n \"acc_norm_stderr\": 0.02433214677913413\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3307262569832402,\n \"acc_stderr\": 0.01573502625896612,\n \"acc_norm\": 0.3307262569832402,\n \"acc_norm_stderr\": 0.01573502625896612\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7418300653594772,\n \"acc_stderr\": 0.02505850331695814,\n \"acc_norm\": 0.7418300653594772,\n \"acc_norm_stderr\": 0.02505850331695814\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6913183279742765,\n \"acc_stderr\": 0.026236965881153266,\n \"acc_norm\": 0.6913183279742765,\n \"acc_norm_stderr\": 0.026236965881153266\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7438271604938271,\n \"acc_stderr\": 0.024288533637726095,\n \"acc_norm\": 0.7438271604938271,\n \"acc_norm_stderr\": 0.024288533637726095\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.029827499313594685,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.029827499313594685\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.47327249022164275,\n \"acc_stderr\": 0.012751977967676008,\n \"acc_norm\": 0.47327249022164275,\n \"acc_norm_stderr\": 0.012751977967676008\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.02841820861940676,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.02841820861940676\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6748366013071896,\n \"acc_stderr\": 0.01895088677080631,\n \"acc_norm\": 0.6748366013071896,\n \"acc_norm_stderr\": 0.01895088677080631\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.04494290866252091,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.04494290866252091\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7387755102040816,\n \"acc_stderr\": 0.028123429335142773,\n \"acc_norm\": 0.7387755102040816,\n \"acc_norm_stderr\": 0.028123429335142773\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8109452736318408,\n \"acc_stderr\": 0.027686913588013003,\n \"acc_norm\": 0.8109452736318408,\n \"acc_norm_stderr\": 0.027686913588013003\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5662650602409639,\n \"acc_stderr\": 0.03858158940685517,\n \"acc_norm\": 0.5662650602409639,\n \"acc_norm_stderr\": 0.03858158940685517\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.38922888616891066,\n \"mc1_stderr\": 0.01706855268069033,\n \"mc2\": 0.5648879973341684,\n \"mc2_stderr\": 0.01540236564556069\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7797947908445146,\n \"acc_stderr\": 0.011646276755089693\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6194086429112965,\n \"acc_stderr\": 0.013373971277729818\n }\n}\n```", "repo_url": 
"https://huggingface.co/Yuma42/KangalKhan-Ruby-7B-Fixed", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "clementine@hf.co", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|arc:challenge|25_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|gsm8k|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hellaswag|10_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T00-20-49.422540.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T00-20-49.422540.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-17T00-20-49.422540.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-17T00-20-49.422540.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T00-20-49.422540.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T00-20-49.422540.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["**/details_harness|winogrande|5_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-17T00-20-49.422540.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_17T00_20_49.422540", "path": ["results_2024-02-17T00-20-49.422540.parquet"]}, {"split": "latest", "path": 
["results_2024-02-17T00-20-49.422540.parquet"]}]}]} | 2024-02-17T00:23:32+00:00 |
5101f04e961b12bf8db5111585f147a795f19916 |
# Dataset Card for Evaluation run of jeiku/Cookie_7B
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [jeiku/Cookie_7B](https://huggingface.co/jeiku/Cookie_7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_jeiku__Cookie_7B",
"harness_winogrande_5",
split="train")
```
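The aggregated metrics are also exposed as their own configuration; a minimal sketch for loading them, assuming the "results" config and the "latest" split listed in this card's configuration metadata:
```python
from datasets import load_dataset

# Aggregated run metrics; the "latest" split points to the most recent run
results = load_dataset("open-llm-leaderboard/details_jeiku__Cookie_7B",
	"results",
	split="latest")
```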
## Latest results
These are the [latest results from run 2024-02-17T00:21:45.959538](https://huggingface.co/datasets/open-llm-leaderboard/details_jeiku__Cookie_7B/blob/main/results_2024-02-17T00-21-45.959538.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6489384774347392,
"acc_stderr": 0.032131421920616465,
"acc_norm": 0.6498781521461111,
"acc_norm_stderr": 0.032783132740631354,
"mc1": 0.5128518971848225,
"mc1_stderr": 0.01749771794429982,
"mc2": 0.6687534212220169,
"mc2_stderr": 0.015263939252034519
},
"harness|arc:challenge|25": {
"acc": 0.6791808873720137,
"acc_stderr": 0.01364094309194653,
"acc_norm": 0.697098976109215,
"acc_norm_stderr": 0.013428241573185349
},
"harness|hellaswag|10": {
"acc": 0.7105158334993029,
"acc_stderr": 0.004525960965551707,
"acc_norm": 0.8757219677355108,
"acc_norm_stderr": 0.0032922425436373417
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.38,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.38,
"acc_norm_stderr": 0.048783173121456316
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6222222222222222,
"acc_stderr": 0.04188307537595853,
"acc_norm": 0.6222222222222222,
"acc_norm_stderr": 0.04188307537595853
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.7171052631578947,
"acc_stderr": 0.03665349695640767,
"acc_norm": 0.7171052631578947,
"acc_norm_stderr": 0.03665349695640767
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001975
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.7132075471698113,
"acc_stderr": 0.027834912527544067,
"acc_norm": 0.7132075471698113,
"acc_norm_stderr": 0.027834912527544067
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7430555555555556,
"acc_stderr": 0.03653946969442099,
"acc_norm": 0.7430555555555556,
"acc_norm_stderr": 0.03653946969442099
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.47,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.47,
"acc_norm_stderr": 0.050161355804659205
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.54,
"acc_stderr": 0.05009082659620333,
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620333
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621504,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621504
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6705202312138728,
"acc_stderr": 0.03583901754736412,
"acc_norm": 0.6705202312138728,
"acc_norm_stderr": 0.03583901754736412
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.38235294117647056,
"acc_stderr": 0.04835503696107224,
"acc_norm": 0.38235294117647056,
"acc_norm_stderr": 0.04835503696107224
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.76,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.76,
"acc_norm_stderr": 0.042923469599092816
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.574468085106383,
"acc_stderr": 0.03232146916224469,
"acc_norm": 0.574468085106383,
"acc_norm_stderr": 0.03232146916224469
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.4824561403508772,
"acc_stderr": 0.04700708033551038,
"acc_norm": 0.4824561403508772,
"acc_norm_stderr": 0.04700708033551038
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5379310344827586,
"acc_stderr": 0.04154659671707548,
"acc_norm": 0.5379310344827586,
"acc_norm_stderr": 0.04154659671707548
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.41005291005291006,
"acc_stderr": 0.02533120243894443,
"acc_norm": 0.41005291005291006,
"acc_norm_stderr": 0.02533120243894443
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.4126984126984127,
"acc_stderr": 0.04403438954768177,
"acc_norm": 0.4126984126984127,
"acc_norm_stderr": 0.04403438954768177
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621504,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621504
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7870967741935484,
"acc_stderr": 0.023287665127268545,
"acc_norm": 0.7870967741935484,
"acc_norm_stderr": 0.023287665127268545
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.5320197044334976,
"acc_stderr": 0.03510766597959215,
"acc_norm": 0.5320197044334976,
"acc_norm_stderr": 0.03510766597959215
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.68,
"acc_stderr": 0.04688261722621505,
"acc_norm": 0.68,
"acc_norm_stderr": 0.04688261722621505
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7818181818181819,
"acc_stderr": 0.03225078108306289,
"acc_norm": 0.7818181818181819,
"acc_norm_stderr": 0.03225078108306289
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.797979797979798,
"acc_stderr": 0.028606204289229865,
"acc_norm": 0.797979797979798,
"acc_norm_stderr": 0.028606204289229865
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8756476683937824,
"acc_stderr": 0.023814477086593552,
"acc_norm": 0.8756476683937824,
"acc_norm_stderr": 0.023814477086593552
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6615384615384615,
"acc_stderr": 0.023991500500313036,
"acc_norm": 0.6615384615384615,
"acc_norm_stderr": 0.023991500500313036
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.337037037037037,
"acc_stderr": 0.02882088466625326,
"acc_norm": 0.337037037037037,
"acc_norm_stderr": 0.02882088466625326
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6932773109243697,
"acc_stderr": 0.029953823891887027,
"acc_norm": 0.6932773109243697,
"acc_norm_stderr": 0.029953823891887027
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.37748344370860926,
"acc_stderr": 0.03958027231121569,
"acc_norm": 0.37748344370860926,
"acc_norm_stderr": 0.03958027231121569
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8495412844036697,
"acc_stderr": 0.015328563932669237,
"acc_norm": 0.8495412844036697,
"acc_norm_stderr": 0.015328563932669237
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5231481481481481,
"acc_stderr": 0.03406315360711507,
"acc_norm": 0.5231481481481481,
"acc_norm_stderr": 0.03406315360711507
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8333333333333334,
"acc_stderr": 0.026156867523931045,
"acc_norm": 0.8333333333333334,
"acc_norm_stderr": 0.026156867523931045
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.8185654008438819,
"acc_stderr": 0.02508596114457966,
"acc_norm": 0.8185654008438819,
"acc_norm_stderr": 0.02508596114457966
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.695067264573991,
"acc_stderr": 0.030898610882477515,
"acc_norm": 0.695067264573991,
"acc_norm_stderr": 0.030898610882477515
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.8015267175572519,
"acc_stderr": 0.034981493854624714,
"acc_norm": 0.8015267175572519,
"acc_norm_stderr": 0.034981493854624714
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7851239669421488,
"acc_stderr": 0.037494924487096966,
"acc_norm": 0.7851239669421488,
"acc_norm_stderr": 0.037494924487096966
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.8055555555555556,
"acc_stderr": 0.038260763248848646,
"acc_norm": 0.8055555555555556,
"acc_norm_stderr": 0.038260763248848646
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7668711656441718,
"acc_stderr": 0.0332201579577674,
"acc_norm": 0.7668711656441718,
"acc_norm_stderr": 0.0332201579577674
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.4375,
"acc_stderr": 0.04708567521880525,
"acc_norm": 0.4375,
"acc_norm_stderr": 0.04708567521880525
},
"harness|hendrycksTest-management|5": {
"acc": 0.8058252427184466,
"acc_stderr": 0.03916667762822585,
"acc_norm": 0.8058252427184466,
"acc_norm_stderr": 0.03916667762822585
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8760683760683761,
"acc_stderr": 0.021586494001281376,
"acc_norm": 0.8760683760683761,
"acc_norm_stderr": 0.021586494001281376
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.69,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.69,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8275862068965517,
"acc_stderr": 0.013507943909371803,
"acc_norm": 0.8275862068965517,
"acc_norm_stderr": 0.013507943909371803
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7283236994219653,
"acc_stderr": 0.023948512905468358,
"acc_norm": 0.7283236994219653,
"acc_norm_stderr": 0.023948512905468358
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.46368715083798884,
"acc_stderr": 0.016678341894533166,
"acc_norm": 0.46368715083798884,
"acc_norm_stderr": 0.016678341894533166
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.738562091503268,
"acc_stderr": 0.025160998214292452,
"acc_norm": 0.738562091503268,
"acc_norm_stderr": 0.025160998214292452
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7202572347266881,
"acc_stderr": 0.025494259350694912,
"acc_norm": 0.7202572347266881,
"acc_norm_stderr": 0.025494259350694912
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7314814814814815,
"acc_stderr": 0.024659685185967284,
"acc_norm": 0.7314814814814815,
"acc_norm_stderr": 0.024659685185967284
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.4645390070921986,
"acc_stderr": 0.029752389657427047,
"acc_norm": 0.4645390070921986,
"acc_norm_stderr": 0.029752389657427047
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.45697522816166886,
"acc_stderr": 0.012722869501611419,
"acc_norm": 0.45697522816166886,
"acc_norm_stderr": 0.012722869501611419
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6875,
"acc_stderr": 0.02815637344037142,
"acc_norm": 0.6875,
"acc_norm_stderr": 0.02815637344037142
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6470588235294118,
"acc_stderr": 0.01933314202079716,
"acc_norm": 0.6470588235294118,
"acc_norm_stderr": 0.01933314202079716
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6727272727272727,
"acc_stderr": 0.0449429086625209,
"acc_norm": 0.6727272727272727,
"acc_norm_stderr": 0.0449429086625209
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7224489795918367,
"acc_stderr": 0.028666857790274645,
"acc_norm": 0.7224489795918367,
"acc_norm_stderr": 0.028666857790274645
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8507462686567164,
"acc_stderr": 0.02519692987482707,
"acc_norm": 0.8507462686567164,
"acc_norm_stderr": 0.02519692987482707
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.82,
"acc_stderr": 0.038612291966536955,
"acc_norm": 0.82,
"acc_norm_stderr": 0.038612291966536955
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5120481927710844,
"acc_stderr": 0.03891364495835817,
"acc_norm": 0.5120481927710844,
"acc_norm_stderr": 0.03891364495835817
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.847953216374269,
"acc_stderr": 0.027539122889061456,
"acc_norm": 0.847953216374269,
"acc_norm_stderr": 0.027539122889061456
},
"harness|truthfulqa:mc|0": {
"mc1": 0.5128518971848225,
"mc1_stderr": 0.01749771794429982,
"mc2": 0.6687534212220169,
"mc2_stderr": 0.015263939252034519
},
"harness|winogrande|5": {
"acc": 0.813733228097869,
"acc_stderr": 0.010941877955676207
},
"harness|gsm8k|5": {
"acc": 0.6118271417740713,
"acc_stderr": 0.013423607564002757
}
}
```
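Per-task detail files can be loaded the same way by picking one of the configurations listed in this card's metadata; for instance, a sketch for the GSM8K per-sample details (config name "harness_gsm8k_5", "latest" split):
```python
from datasets import load_dataset

# Per-sample details for the 5-shot GSM8K eval of this run
gsm8k_details = load_dataset("open-llm-leaderboard/details_jeiku__Cookie_7B",
	"harness_gsm8k_5",
	split="latest")
```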
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | open-llm-leaderboard/details_jeiku__Cookie_7B | [
"region:us"
] | 2024-02-17T00:24:04+00:00 | {"pretty_name": "Evaluation run of jeiku/Cookie_7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [jeiku/Cookie_7B](https://huggingface.co/jeiku/Cookie_7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jeiku__Cookie_7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-17T00:21:45.959538](https://huggingface.co/datasets/open-llm-leaderboard/details_jeiku__Cookie_7B/blob/main/results_2024-02-17T00-21-45.959538.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6489384774347392,\n \"acc_stderr\": 0.032131421920616465,\n \"acc_norm\": 0.6498781521461111,\n \"acc_norm_stderr\": 0.032783132740631354,\n \"mc1\": 0.5128518971848225,\n \"mc1_stderr\": 0.01749771794429982,\n \"mc2\": 0.6687534212220169,\n \"mc2_stderr\": 0.015263939252034519\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6791808873720137,\n \"acc_stderr\": 0.01364094309194653,\n \"acc_norm\": 0.697098976109215,\n \"acc_norm_stderr\": 0.013428241573185349\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7105158334993029,\n \"acc_stderr\": 0.004525960965551707,\n \"acc_norm\": 0.8757219677355108,\n \"acc_norm_stderr\": 0.0032922425436373417\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6222222222222222,\n \"acc_stderr\": 0.04188307537595853,\n \"acc_norm\": 0.6222222222222222,\n \"acc_norm_stderr\": 0.04188307537595853\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7171052631578947,\n \"acc_stderr\": 0.03665349695640767,\n \"acc_norm\": 0.7171052631578947,\n \"acc_norm_stderr\": 0.03665349695640767\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7132075471698113,\n \"acc_stderr\": 0.027834912527544067,\n \"acc_norm\": 0.7132075471698113,\n \"acc_norm_stderr\": 0.027834912527544067\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7430555555555556,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.7430555555555556,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 
0.050161355804659205\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107224,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107224\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.574468085106383,\n \"acc_stderr\": 0.03232146916224469,\n \"acc_norm\": 0.574468085106383,\n \"acc_norm_stderr\": 0.03232146916224469\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.4824561403508772,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5379310344827586,\n \"acc_stderr\": 0.04154659671707548,\n \"acc_norm\": 0.5379310344827586,\n \"acc_norm_stderr\": 0.04154659671707548\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41005291005291006,\n \"acc_stderr\": 0.02533120243894443,\n \"acc_norm\": 0.41005291005291006,\n \"acc_norm_stderr\": 0.02533120243894443\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4126984126984127,\n \"acc_stderr\": 0.04403438954768177,\n \"acc_norm\": 0.4126984126984127,\n \"acc_norm_stderr\": 0.04403438954768177\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7870967741935484,\n \"acc_stderr\": 0.023287665127268545,\n \"acc_norm\": 0.7870967741935484,\n \"acc_norm_stderr\": 0.023287665127268545\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5320197044334976,\n \"acc_stderr\": 0.03510766597959215,\n \"acc_norm\": 0.5320197044334976,\n \"acc_norm_stderr\": 0.03510766597959215\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.797979797979798,\n \"acc_stderr\": 0.028606204289229865,\n \"acc_norm\": 0.797979797979798,\n \"acc_norm_stderr\": 0.028606204289229865\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8756476683937824,\n \"acc_stderr\": 0.023814477086593552,\n \"acc_norm\": 0.8756476683937824,\n \"acc_norm_stderr\": 0.023814477086593552\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6615384615384615,\n \"acc_stderr\": 
0.023991500500313036,\n \"acc_norm\": 0.6615384615384615,\n \"acc_norm_stderr\": 0.023991500500313036\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.337037037037037,\n \"acc_stderr\": 0.02882088466625326,\n \"acc_norm\": 0.337037037037037,\n \"acc_norm_stderr\": 0.02882088466625326\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6932773109243697,\n \"acc_stderr\": 0.029953823891887027,\n \"acc_norm\": 0.6932773109243697,\n \"acc_norm_stderr\": 0.029953823891887027\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.37748344370860926,\n \"acc_stderr\": 0.03958027231121569,\n \"acc_norm\": 0.37748344370860926,\n \"acc_norm_stderr\": 0.03958027231121569\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8495412844036697,\n \"acc_stderr\": 0.015328563932669237,\n \"acc_norm\": 0.8495412844036697,\n \"acc_norm_stderr\": 0.015328563932669237\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5231481481481481,\n \"acc_stderr\": 0.03406315360711507,\n \"acc_norm\": 0.5231481481481481,\n \"acc_norm_stderr\": 0.03406315360711507\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.026156867523931045,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.026156867523931045\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8185654008438819,\n \"acc_stderr\": 0.02508596114457966,\n \"acc_norm\": 0.8185654008438819,\n \"acc_norm_stderr\": 0.02508596114457966\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8015267175572519,\n \"acc_stderr\": 0.034981493854624714,\n \"acc_norm\": 0.8015267175572519,\n \"acc_norm_stderr\": 0.034981493854624714\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7668711656441718,\n \"acc_stderr\": 0.0332201579577674,\n \"acc_norm\": 0.7668711656441718,\n \"acc_norm_stderr\": 0.0332201579577674\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4375,\n \"acc_stderr\": 0.04708567521880525,\n \"acc_norm\": 0.4375,\n \"acc_norm_stderr\": 0.04708567521880525\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8058252427184466,\n \"acc_stderr\": 0.03916667762822585,\n \"acc_norm\": 0.8058252427184466,\n \"acc_norm_stderr\": 0.03916667762822585\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8760683760683761,\n \"acc_stderr\": 0.021586494001281376,\n \"acc_norm\": 0.8760683760683761,\n \"acc_norm_stderr\": 0.021586494001281376\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8275862068965517,\n \"acc_stderr\": 0.013507943909371803,\n \"acc_norm\": 0.8275862068965517,\n \"acc_norm_stderr\": 
0.013507943909371803\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7283236994219653,\n \"acc_stderr\": 0.023948512905468358,\n \"acc_norm\": 0.7283236994219653,\n \"acc_norm_stderr\": 0.023948512905468358\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.46368715083798884,\n \"acc_stderr\": 0.016678341894533166,\n \"acc_norm\": 0.46368715083798884,\n \"acc_norm_stderr\": 0.016678341894533166\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.738562091503268,\n \"acc_stderr\": 0.025160998214292452,\n \"acc_norm\": 0.738562091503268,\n \"acc_norm_stderr\": 0.025160998214292452\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7202572347266881,\n \"acc_stderr\": 0.025494259350694912,\n \"acc_norm\": 0.7202572347266881,\n \"acc_norm_stderr\": 0.025494259350694912\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7314814814814815,\n \"acc_stderr\": 0.024659685185967284,\n \"acc_norm\": 0.7314814814814815,\n \"acc_norm_stderr\": 0.024659685185967284\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4645390070921986,\n \"acc_stderr\": 0.029752389657427047,\n \"acc_norm\": 0.4645390070921986,\n \"acc_norm_stderr\": 0.029752389657427047\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.45697522816166886,\n \"acc_stderr\": 0.012722869501611419,\n \"acc_norm\": 0.45697522816166886,\n \"acc_norm_stderr\": 0.012722869501611419\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6875,\n \"acc_stderr\": 0.02815637344037142,\n \"acc_norm\": 0.6875,\n \"acc_norm_stderr\": 0.02815637344037142\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6470588235294118,\n \"acc_stderr\": 0.01933314202079716,\n \"acc_norm\": 0.6470588235294118,\n \"acc_norm_stderr\": 0.01933314202079716\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.028666857790274645,\n \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.028666857790274645\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8507462686567164,\n \"acc_stderr\": 0.02519692987482707,\n \"acc_norm\": 0.8507462686567164,\n \"acc_norm_stderr\": 0.02519692987482707\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.82,\n \"acc_stderr\": 0.038612291966536955,\n \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.038612291966536955\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5120481927710844,\n \"acc_stderr\": 0.03891364495835817,\n \"acc_norm\": 0.5120481927710844,\n \"acc_norm_stderr\": 0.03891364495835817\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.847953216374269,\n \"acc_stderr\": 0.027539122889061456,\n \"acc_norm\": 0.847953216374269,\n \"acc_norm_stderr\": 0.027539122889061456\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5128518971848225,\n \"mc1_stderr\": 0.01749771794429982,\n \"mc2\": 0.6687534212220169,\n \"mc2_stderr\": 0.015263939252034519\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.813733228097869,\n \"acc_stderr\": 0.010941877955676207\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6118271417740713,\n \"acc_stderr\": 0.013423607564002757\n }\n}\n```", "repo_url": "https://huggingface.co/jeiku/Cookie_7B", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "clementine@hf.co", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|arc:challenge|25_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|gsm8k|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hellaswag|10_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T00-21-45.959538.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T00-21-45.959538.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-17T00-21-45.959538.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-17T00-21-45.959538.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T00-21-45.959538.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T00-21-45.959538.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["**/details_harness|winogrande|5_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-17T00-21-45.959538.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_17T00_21_45.959538", "path": ["results_2024-02-17T00-21-45.959538.parquet"]}, {"split": "latest", "path": 
["results_2024-02-17T00-21-45.959538.parquet"]}]}]} | 2024-02-17T00:24:28+00:00 |
1953844eb698af211069a78ce94e32be41ee1b63 |
# Dataset Card for Evaluation run of fzzhang/Marcoroni-neural-chat-7B-v2_gsm8k_quantized_mergedfloat_s
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [fzzhang/Marcoroni-neural-chat-7B-v2_gsm8k_quantized_mergedfloat_s](https://huggingface.co/fzzhang/Marcoroni-neural-chat-7B-v2_gsm8k_quantized_mergedfloat_s) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_fzzhang__Marcoroni-neural-chat-7B-v2_gsm8k_quantized_mergedfloat_s",
"harness_winogrande_5",
split="train")
```
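The same call also works for the aggregated scores: the "results" configuration exposes one split per run timestamp plus a "latest" split. A minimal sketch (the config and split names below are taken from this card's own metadata):
```python
from datasets import load_dataset

# Aggregated metrics for this model; "latest" always points to the most recent run.
results = load_dataset(
    "open-llm-leaderboard/details_fzzhang__Marcoroni-neural-chat-7B-v2_gsm8k_quantized_mergedfloat_s",
    "results",
    split="latest",
)
```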
## Latest results
These are the [latest results from run 2024-02-17T00:25:52.922442](https://huggingface.co/datasets/open-llm-leaderboard/details_fzzhang__Marcoroni-neural-chat-7B-v2_gsm8k_quantized_mergedfloat_s/blob/main/results_2024-02-17T00-25-52.922442.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each one in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6135127589001832,
"acc_stderr": 0.032796760940438034,
"acc_norm": 0.6157670560754505,
"acc_norm_stderr": 0.033451817306662635,
"mc1": 0.37454100367197063,
"mc1_stderr": 0.016943535128405327,
"mc2": 0.5477195184186756,
"mc2_stderr": 0.015358664393160576
},
"harness|arc:challenge|25": {
"acc": 0.5930034129692833,
"acc_stderr": 0.01435639941800912,
"acc_norm": 0.6407849829351536,
"acc_norm_stderr": 0.014020224155839162
},
"harness|hellaswag|10": {
"acc": 0.6493726349332802,
"acc_stderr": 0.00476191251170751,
"acc_norm": 0.841167098187612,
"acc_norm_stderr": 0.003647731723938848
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6074074074074074,
"acc_stderr": 0.0421850621536888,
"acc_norm": 0.6074074074074074,
"acc_norm_stderr": 0.0421850621536888
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6447368421052632,
"acc_stderr": 0.03894734487013316,
"acc_norm": 0.6447368421052632,
"acc_norm_stderr": 0.03894734487013316
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.6,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.6,
"acc_norm_stderr": 0.04923659639173309
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.690566037735849,
"acc_stderr": 0.028450154794118637,
"acc_norm": 0.690566037735849,
"acc_norm_stderr": 0.028450154794118637
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7569444444444444,
"acc_stderr": 0.03586879280080341,
"acc_norm": 0.7569444444444444,
"acc_norm_stderr": 0.03586879280080341
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.45,
"acc_stderr": 0.05,
"acc_norm": 0.45,
"acc_norm_stderr": 0.05
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.48,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.48,
"acc_norm_stderr": 0.050211673156867795
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.24,
"acc_stderr": 0.04292346959909282,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909282
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.630057803468208,
"acc_stderr": 0.0368122963339432,
"acc_norm": 0.630057803468208,
"acc_norm_stderr": 0.0368122963339432
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.04690650298201942,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.04690650298201942
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.73,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.73,
"acc_norm_stderr": 0.044619604333847394
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5361702127659574,
"acc_stderr": 0.03260038511835771,
"acc_norm": 0.5361702127659574,
"acc_norm_stderr": 0.03260038511835771
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.4298245614035088,
"acc_stderr": 0.046570472605949625,
"acc_norm": 0.4298245614035088,
"acc_norm_stderr": 0.046570472605949625
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.503448275862069,
"acc_stderr": 0.04166567577101579,
"acc_norm": 0.503448275862069,
"acc_norm_stderr": 0.04166567577101579
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.3968253968253968,
"acc_stderr": 0.025197101074246483,
"acc_norm": 0.3968253968253968,
"acc_norm_stderr": 0.025197101074246483
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.4365079365079365,
"acc_stderr": 0.04435932892851466,
"acc_norm": 0.4365079365079365,
"acc_norm_stderr": 0.04435932892851466
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7354838709677419,
"acc_stderr": 0.02509189237885928,
"acc_norm": 0.7354838709677419,
"acc_norm_stderr": 0.02509189237885928
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.4630541871921182,
"acc_stderr": 0.035083705204426656,
"acc_norm": 0.4630541871921182,
"acc_norm_stderr": 0.035083705204426656
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.6,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.6,
"acc_norm_stderr": 0.049236596391733084
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7454545454545455,
"acc_stderr": 0.03401506715249039,
"acc_norm": 0.7454545454545455,
"acc_norm_stderr": 0.03401506715249039
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7171717171717171,
"acc_stderr": 0.03208779558786751,
"acc_norm": 0.7171717171717171,
"acc_norm_stderr": 0.03208779558786751
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8549222797927462,
"acc_stderr": 0.025416343096306422,
"acc_norm": 0.8549222797927462,
"acc_norm_stderr": 0.025416343096306422
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6076923076923076,
"acc_stderr": 0.024756000382130952,
"acc_norm": 0.6076923076923076,
"acc_norm_stderr": 0.024756000382130952
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.34074074074074073,
"acc_stderr": 0.028897748741131147,
"acc_norm": 0.34074074074074073,
"acc_norm_stderr": 0.028897748741131147
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6428571428571429,
"acc_stderr": 0.031124619309328177,
"acc_norm": 0.6428571428571429,
"acc_norm_stderr": 0.031124619309328177
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.304635761589404,
"acc_stderr": 0.03757949922943343,
"acc_norm": 0.304635761589404,
"acc_norm_stderr": 0.03757949922943343
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8165137614678899,
"acc_stderr": 0.0165952597103993,
"acc_norm": 0.8165137614678899,
"acc_norm_stderr": 0.0165952597103993
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5046296296296297,
"acc_stderr": 0.03409825519163572,
"acc_norm": 0.5046296296296297,
"acc_norm_stderr": 0.03409825519163572
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.7647058823529411,
"acc_stderr": 0.029771775228145635,
"acc_norm": 0.7647058823529411,
"acc_norm_stderr": 0.029771775228145635
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7805907172995781,
"acc_stderr": 0.026939106581553945,
"acc_norm": 0.7805907172995781,
"acc_norm_stderr": 0.026939106581553945
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6367713004484304,
"acc_stderr": 0.032277904428505,
"acc_norm": 0.6367713004484304,
"acc_norm_stderr": 0.032277904428505
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7404580152671756,
"acc_stderr": 0.03844876139785271,
"acc_norm": 0.7404580152671756,
"acc_norm_stderr": 0.03844876139785271
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7272727272727273,
"acc_stderr": 0.04065578140908705,
"acc_norm": 0.7272727272727273,
"acc_norm_stderr": 0.04065578140908705
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7685185185185185,
"acc_stderr": 0.04077494709252627,
"acc_norm": 0.7685185185185185,
"acc_norm_stderr": 0.04077494709252627
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7055214723926381,
"acc_stderr": 0.03581165790474082,
"acc_norm": 0.7055214723926381,
"acc_norm_stderr": 0.03581165790474082
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.5535714285714286,
"acc_stderr": 0.047184714852195865,
"acc_norm": 0.5535714285714286,
"acc_norm_stderr": 0.047184714852195865
},
"harness|hendrycksTest-management|5": {
"acc": 0.8252427184466019,
"acc_stderr": 0.03760178006026622,
"acc_norm": 0.8252427184466019,
"acc_norm_stderr": 0.03760178006026622
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8504273504273504,
"acc_stderr": 0.023365051491753715,
"acc_norm": 0.8504273504273504,
"acc_norm_stderr": 0.023365051491753715
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.67,
"acc_stderr": 0.047258156262526094,
"acc_norm": 0.67,
"acc_norm_stderr": 0.047258156262526094
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8109833971902938,
"acc_stderr": 0.014000791294407003,
"acc_norm": 0.8109833971902938,
"acc_norm_stderr": 0.014000791294407003
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.6994219653179191,
"acc_stderr": 0.024685316867257796,
"acc_norm": 0.6994219653179191,
"acc_norm_stderr": 0.024685316867257796
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.3463687150837989,
"acc_stderr": 0.015913546784020117,
"acc_norm": 0.3463687150837989,
"acc_norm_stderr": 0.015913546784020117
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.696078431372549,
"acc_stderr": 0.026336613469046626,
"acc_norm": 0.696078431372549,
"acc_norm_stderr": 0.026336613469046626
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7106109324758842,
"acc_stderr": 0.025755865922632945,
"acc_norm": 0.7106109324758842,
"acc_norm_stderr": 0.025755865922632945
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7222222222222222,
"acc_stderr": 0.024922001168886335,
"acc_norm": 0.7222222222222222,
"acc_norm_stderr": 0.024922001168886335
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.425531914893617,
"acc_stderr": 0.02949482760014437,
"acc_norm": 0.425531914893617,
"acc_norm_stderr": 0.02949482760014437
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4276401564537158,
"acc_stderr": 0.012635799922765844,
"acc_norm": 0.4276401564537158,
"acc_norm_stderr": 0.012635799922765844
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6213235294117647,
"acc_stderr": 0.02946513363977613,
"acc_norm": 0.6213235294117647,
"acc_norm_stderr": 0.02946513363977613
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6454248366013072,
"acc_stderr": 0.019353360547553707,
"acc_norm": 0.6454248366013072,
"acc_norm_stderr": 0.019353360547553707
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6363636363636364,
"acc_stderr": 0.046075820907199756,
"acc_norm": 0.6363636363636364,
"acc_norm_stderr": 0.046075820907199756
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7020408163265306,
"acc_stderr": 0.02927956741106568,
"acc_norm": 0.7020408163265306,
"acc_norm_stderr": 0.02927956741106568
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8109452736318408,
"acc_stderr": 0.02768691358801302,
"acc_norm": 0.8109452736318408,
"acc_norm_stderr": 0.02768691358801302
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.83,
"acc_stderr": 0.0377525168068637,
"acc_norm": 0.83,
"acc_norm_stderr": 0.0377525168068637
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5301204819277109,
"acc_stderr": 0.03885425420866767,
"acc_norm": 0.5301204819277109,
"acc_norm_stderr": 0.03885425420866767
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8011695906432749,
"acc_stderr": 0.030611116557432528,
"acc_norm": 0.8011695906432749,
"acc_norm_stderr": 0.030611116557432528
},
"harness|truthfulqa:mc|0": {
"mc1": 0.37454100367197063,
"mc1_stderr": 0.016943535128405327,
"mc2": 0.5477195184186756,
"mc2_stderr": 0.015358664393160576
},
"harness|winogrande|5": {
"acc": 0.7695343330702447,
"acc_stderr": 0.011835872164836676
},
"harness|gsm8k|5": {
"acc": 0.5640636846095527,
"acc_stderr": 0.013658968058849159
}
}
```
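To work with these numbers programmatically, the raw results file linked above can be downloaded directly from the Hub. A minimal sketch (it assumes the raw JSON nests the per-task metrics under a top-level `results` key, hence the fallback below; the repo id and filename are taken from the "Latest results" link above):
```python
import json
from huggingface_hub import hf_hub_download

# Download the raw results file referenced in the "Latest results" section.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/details_fzzhang__Marcoroni-neural-chat-7B-v2_gsm8k_quantized_mergedfloat_s",
    filename="results_2024-02-17T00-25-52.922442.json",
    repo_type="dataset",
)
with open(path) as f:
    run = json.load(f)

# Use the nested "results" dict if present; otherwise assume the file is already the flat dict shown above.
scores = run.get("results", run)

# Rough macro average over the hendrycksTest (MMLU) sub-tasks;
# not necessarily the leaderboard's exact aggregation.
mmlu = [v["acc"] for k, v in scores.items() if k.startswith("harness|hendrycksTest-")]
print(f"MMLU macro average over {len(mmlu)} sub-tasks: {sum(mmlu) / len(mmlu):.4f}")
```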
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | open-llm-leaderboard/details_fzzhang__Marcoroni-neural-chat-7B-v2_gsm8k_quantized_mergedfloat_s | [
"region:us"
] | 2024-02-17T00:28:11+00:00 | {"pretty_name": "Evaluation run of fzzhang/Marcoroni-neural-chat-7B-v2_gsm8k_quantized_mergedfloat_s", "dataset_summary": "Dataset automatically created during the evaluation run of model [fzzhang/Marcoroni-neural-chat-7B-v2_gsm8k_quantized_mergedfloat_s](https://huggingface.co/fzzhang/Marcoroni-neural-chat-7B-v2_gsm8k_quantized_mergedfloat_s) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_fzzhang__Marcoroni-neural-chat-7B-v2_gsm8k_quantized_mergedfloat_s\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-17T00:25:52.922442](https://huggingface.co/datasets/open-llm-leaderboard/details_fzzhang__Marcoroni-neural-chat-7B-v2_gsm8k_quantized_mergedfloat_s/blob/main/results_2024-02-17T00-25-52.922442.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6135127589001832,\n \"acc_stderr\": 0.032796760940438034,\n \"acc_norm\": 0.6157670560754505,\n \"acc_norm_stderr\": 0.033451817306662635,\n \"mc1\": 0.37454100367197063,\n \"mc1_stderr\": 0.016943535128405327,\n \"mc2\": 0.5477195184186756,\n \"mc2_stderr\": 0.015358664393160576\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5930034129692833,\n \"acc_stderr\": 0.01435639941800912,\n \"acc_norm\": 0.6407849829351536,\n \"acc_norm_stderr\": 0.014020224155839162\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6493726349332802,\n \"acc_stderr\": 0.00476191251170751,\n \"acc_norm\": 0.841167098187612,\n \"acc_norm_stderr\": 0.003647731723938848\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6074074074074074,\n \"acc_stderr\": 0.0421850621536888,\n \"acc_norm\": 0.6074074074074074,\n \"acc_norm_stderr\": 0.0421850621536888\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6447368421052632,\n \"acc_stderr\": 0.03894734487013316,\n \"acc_norm\": 0.6447368421052632,\n \"acc_norm_stderr\": 0.03894734487013316\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.690566037735849,\n \"acc_stderr\": 0.028450154794118637,\n \"acc_norm\": 0.690566037735849,\n \"acc_norm_stderr\": 0.028450154794118637\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7569444444444444,\n \"acc_stderr\": 
0.03586879280080341,\n \"acc_norm\": 0.7569444444444444,\n \"acc_norm_stderr\": 0.03586879280080341\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909282,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909282\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.630057803468208,\n \"acc_stderr\": 0.0368122963339432,\n \"acc_norm\": 0.630057803468208,\n \"acc_norm_stderr\": 0.0368122963339432\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.04690650298201942,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.04690650298201942\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5361702127659574,\n \"acc_stderr\": 0.03260038511835771,\n \"acc_norm\": 0.5361702127659574,\n \"acc_norm_stderr\": 0.03260038511835771\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4298245614035088,\n \"acc_stderr\": 0.046570472605949625,\n \"acc_norm\": 0.4298245614035088,\n \"acc_norm_stderr\": 0.046570472605949625\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.503448275862069,\n \"acc_stderr\": 0.04166567577101579,\n \"acc_norm\": 0.503448275862069,\n \"acc_norm_stderr\": 0.04166567577101579\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3968253968253968,\n \"acc_stderr\": 0.025197101074246483,\n \"acc_norm\": 0.3968253968253968,\n \"acc_norm_stderr\": 0.025197101074246483\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4365079365079365,\n \"acc_stderr\": 0.04435932892851466,\n \"acc_norm\": 0.4365079365079365,\n \"acc_norm_stderr\": 0.04435932892851466\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7354838709677419,\n \"acc_stderr\": 0.02509189237885928,\n \"acc_norm\": 0.7354838709677419,\n \"acc_norm_stderr\": 0.02509189237885928\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4630541871921182,\n \"acc_stderr\": 0.035083705204426656,\n \"acc_norm\": 0.4630541871921182,\n \"acc_norm_stderr\": 0.035083705204426656\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7454545454545455,\n \"acc_stderr\": 0.03401506715249039,\n \"acc_norm\": 0.7454545454545455,\n \"acc_norm_stderr\": 0.03401506715249039\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7171717171717171,\n \"acc_stderr\": 0.03208779558786751,\n \"acc_norm\": 0.7171717171717171,\n \"acc_norm_stderr\": 0.03208779558786751\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8549222797927462,\n \"acc_stderr\": 
0.025416343096306422,\n \"acc_norm\": 0.8549222797927462,\n \"acc_norm_stderr\": 0.025416343096306422\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6076923076923076,\n \"acc_stderr\": 0.024756000382130952,\n \"acc_norm\": 0.6076923076923076,\n \"acc_norm_stderr\": 0.024756000382130952\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34074074074074073,\n \"acc_stderr\": 0.028897748741131147,\n \"acc_norm\": 0.34074074074074073,\n \"acc_norm_stderr\": 0.028897748741131147\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6428571428571429,\n \"acc_stderr\": 0.031124619309328177,\n \"acc_norm\": 0.6428571428571429,\n \"acc_norm_stderr\": 0.031124619309328177\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.304635761589404,\n \"acc_stderr\": 0.03757949922943343,\n \"acc_norm\": 0.304635761589404,\n \"acc_norm_stderr\": 0.03757949922943343\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8165137614678899,\n \"acc_stderr\": 0.0165952597103993,\n \"acc_norm\": 0.8165137614678899,\n \"acc_norm_stderr\": 0.0165952597103993\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5046296296296297,\n \"acc_stderr\": 0.03409825519163572,\n \"acc_norm\": 0.5046296296296297,\n \"acc_norm_stderr\": 0.03409825519163572\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7647058823529411,\n \"acc_stderr\": 0.029771775228145635,\n \"acc_norm\": 0.7647058823529411,\n \"acc_norm_stderr\": 0.029771775228145635\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7805907172995781,\n \"acc_stderr\": 0.026939106581553945,\n \"acc_norm\": 0.7805907172995781,\n \"acc_norm_stderr\": 0.026939106581553945\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6367713004484304,\n \"acc_stderr\": 0.032277904428505,\n \"acc_norm\": 0.6367713004484304,\n \"acc_norm_stderr\": 0.032277904428505\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7404580152671756,\n \"acc_stderr\": 0.03844876139785271,\n \"acc_norm\": 0.7404580152671756,\n \"acc_norm_stderr\": 0.03844876139785271\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.04065578140908705,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.04065578140908705\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.04077494709252627,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.04077494709252627\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7055214723926381,\n \"acc_stderr\": 0.03581165790474082,\n \"acc_norm\": 0.7055214723926381,\n \"acc_norm_stderr\": 0.03581165790474082\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5535714285714286,\n \"acc_stderr\": 0.047184714852195865,\n \"acc_norm\": 0.5535714285714286,\n \"acc_norm_stderr\": 0.047184714852195865\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8252427184466019,\n \"acc_stderr\": 0.03760178006026622,\n \"acc_norm\": 0.8252427184466019,\n \"acc_norm_stderr\": 0.03760178006026622\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8504273504273504,\n \"acc_stderr\": 0.023365051491753715,\n \"acc_norm\": 0.8504273504273504,\n \"acc_norm_stderr\": 0.023365051491753715\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.047258156262526094,\n \"acc_norm\": 0.67,\n 
\"acc_norm_stderr\": 0.047258156262526094\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8109833971902938,\n \"acc_stderr\": 0.014000791294407003,\n \"acc_norm\": 0.8109833971902938,\n \"acc_norm_stderr\": 0.014000791294407003\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6994219653179191,\n \"acc_stderr\": 0.024685316867257796,\n \"acc_norm\": 0.6994219653179191,\n \"acc_norm_stderr\": 0.024685316867257796\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3463687150837989,\n \"acc_stderr\": 0.015913546784020117,\n \"acc_norm\": 0.3463687150837989,\n \"acc_norm_stderr\": 0.015913546784020117\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.696078431372549,\n \"acc_stderr\": 0.026336613469046626,\n \"acc_norm\": 0.696078431372549,\n \"acc_norm_stderr\": 0.026336613469046626\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7106109324758842,\n \"acc_stderr\": 0.025755865922632945,\n \"acc_norm\": 0.7106109324758842,\n \"acc_norm_stderr\": 0.025755865922632945\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.024922001168886335,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.024922001168886335\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.425531914893617,\n \"acc_stderr\": 0.02949482760014437,\n \"acc_norm\": 0.425531914893617,\n \"acc_norm_stderr\": 0.02949482760014437\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4276401564537158,\n \"acc_stderr\": 0.012635799922765844,\n \"acc_norm\": 0.4276401564537158,\n \"acc_norm_stderr\": 0.012635799922765844\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6213235294117647,\n \"acc_stderr\": 0.02946513363977613,\n \"acc_norm\": 0.6213235294117647,\n \"acc_norm_stderr\": 0.02946513363977613\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6454248366013072,\n \"acc_stderr\": 0.019353360547553707,\n \"acc_norm\": 0.6454248366013072,\n \"acc_norm_stderr\": 0.019353360547553707\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6363636363636364,\n \"acc_stderr\": 0.046075820907199756,\n \"acc_norm\": 0.6363636363636364,\n \"acc_norm_stderr\": 0.046075820907199756\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7020408163265306,\n \"acc_stderr\": 0.02927956741106568,\n \"acc_norm\": 0.7020408163265306,\n \"acc_norm_stderr\": 0.02927956741106568\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8109452736318408,\n \"acc_stderr\": 0.02768691358801302,\n \"acc_norm\": 0.8109452736318408,\n \"acc_norm_stderr\": 0.02768691358801302\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.83,\n \"acc_stderr\": 0.0377525168068637,\n \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.0377525168068637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5301204819277109,\n \"acc_stderr\": 0.03885425420866767,\n \"acc_norm\": 0.5301204819277109,\n \"acc_norm_stderr\": 0.03885425420866767\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8011695906432749,\n \"acc_stderr\": 0.030611116557432528,\n \"acc_norm\": 0.8011695906432749,\n \"acc_norm_stderr\": 0.030611116557432528\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.37454100367197063,\n \"mc1_stderr\": 0.016943535128405327,\n \"mc2\": 0.5477195184186756,\n \"mc2_stderr\": 0.015358664393160576\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7695343330702447,\n \"acc_stderr\": 
0.011835872164836676\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5640636846095527,\n \"acc_stderr\": 0.013658968058849159\n }\n}\n```", "repo_url": "https://huggingface.co/fzzhang/Marcoroni-neural-chat-7B-v2_gsm8k_quantized_mergedfloat_s", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "clementine@hf.co", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|arc:challenge|25_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|gsm8k|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hellaswag|10_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T00-25-52.922442.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-17T00-25-52.922442.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T00-25-52.922442.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-17T00-25-52.922442.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T00-25-52.922442.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["**/details_harness|winogrande|5_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-02-17T00-25-52.922442.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_17T00_25_52.922442", "path": ["results_2024-02-17T00-25-52.922442.parquet"]}, {"split": "latest", "path": ["results_2024-02-17T00-25-52.922442.parquet"]}]}]} | 2024-02-17T00:28:35+00:00 |
dab8f1e32a0cbf696f7b4954ad68e0690559dbe8 | AIGym/ai-tech-articles | [
"region:us"
] | 2024-02-17T00:31:48+00:00 | {"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 177472659, "num_examples": 17092}], "download_size": 80029866, "dataset_size": 177472659}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T00:31:54+00:00 |
|
1b053981b05760def58ae4ef4d97beb2e5f0accc | AIGym/reddit-clean | [
"region:us"
] | 2024-02-17T00:34:35+00:00 | {"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 219359991, "num_examples": 152431}], "download_size": 136445950, "dataset_size": 219359991}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T00:34:43+00:00 |
|
014408ca5b2a05e0b435e4fd5704dc7b5fb18c00 | kleinfour/splat-data | [
"region:us"
] | 2024-02-17T00:36:59+00:00 | {} | 2024-02-17T00:38:05+00:00 |
|
bdb8d769cd62c2944bb4ff19da39b38235f18624 | Simonk97/HTL | [
"license:openrail",
"region:us"
] | 2024-02-17T00:43:25+00:00 | {"license": "openrail"} | 2024-02-17T00:45:07+00:00 |
|
2573e1c215e117f824845507a2762dd6eee0b276 | willwade/UNL-AAC-Phrases | [
"license:mit",
"region:us"
] | 2024-02-17T00:45:11+00:00 | {"license": "mit"} | 2024-02-17T00:46:56+00:00 |
|
8bde27897d89676b64c4d768fd31422553d4c6fc | andrewatef/MOV0_1 | [
"region:us"
] | 2024-02-17T00:55:33+00:00 | {"dataset_info": {"features": [{"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 27212260.0, "num_examples": 52476}], "download_size": 10659890, "dataset_size": 27212260.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T00:58:08+00:00 |
|
4c94484790715481c2df7a1fc85912c46ae9c252 | Asma50AA/S2T | [
"license:mit",
"region:us"
] | 2024-02-17T00:56:45+00:00 | {"license": "mit"} | 2024-02-17T00:56:45+00:00 |
|
15ce75e7a29f1ced3816d1349b09cf9e3a9c6928 |
# Dataset Card for Evaluation run of mayacinka/NeuralZephyr-Beagle-7B
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [mayacinka/NeuralZephyr-Beagle-7B](https://huggingface.co/mayacinka/NeuralZephyr-Beagle-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_mayacinka__NeuralZephyr-Beagle-7B",
"harness_winogrande_5",
split="train")
```
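The aggregated metrics themselves live in the "results" configuration mentioned above. As a minimal sketch (assuming the same `latest` split alias used by the per-task configurations), they can be loaded the same way:
```python
from datasets import load_dataset

# Aggregated results for this run; the "latest" split mirrors the most recent evaluation.
results = load_dataset("open-llm-leaderboard/details_mayacinka__NeuralZephyr-Beagle-7B",
	"results",
	split="latest")
```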
## Latest results
These are the [latest results from run 2024-02-17T00:55:18.728023](https://huggingface.co/datasets/open-llm-leaderboard/details_mayacinka__NeuralZephyr-Beagle-7B/blob/main/results_2024-02-17T00-55-18.728023.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6499857973810573,
"acc_stderr": 0.0322179748198474,
"acc_norm": 0.651015316120668,
"acc_norm_stderr": 0.032873289143278944,
"mc1": 0.4944920440636475,
"mc1_stderr": 0.01750243899045106,
"mc2": 0.6516576799205165,
"mc2_stderr": 0.01520679103207334
},
"harness|arc:challenge|25": {
"acc": 0.6578498293515358,
"acc_stderr": 0.013864152159177275,
"acc_norm": 0.6860068259385665,
"acc_norm_stderr": 0.013562691224726297
},
"harness|hellaswag|10": {
"acc": 0.6852220673172674,
"acc_stderr": 0.004634782156128581,
"acc_norm": 0.8637721569408484,
"acc_norm_stderr": 0.0034232928816321498
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.34,
"acc_stderr": 0.047609522856952365,
"acc_norm": 0.34,
"acc_norm_stderr": 0.047609522856952365
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6444444444444445,
"acc_stderr": 0.04135176749720385,
"acc_norm": 0.6444444444444445,
"acc_norm_stderr": 0.04135176749720385
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.7039473684210527,
"acc_stderr": 0.03715062154998904,
"acc_norm": 0.7039473684210527,
"acc_norm_stderr": 0.03715062154998904
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001975
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.7169811320754716,
"acc_stderr": 0.027724236492700918,
"acc_norm": 0.7169811320754716,
"acc_norm_stderr": 0.027724236492700918
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7361111111111112,
"acc_stderr": 0.03685651095897532,
"acc_norm": 0.7361111111111112,
"acc_norm_stderr": 0.03685651095897532
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956912
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6820809248554913,
"acc_stderr": 0.0355068398916558,
"acc_norm": 0.6820809248554913,
"acc_norm_stderr": 0.0355068398916558
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.4117647058823529,
"acc_stderr": 0.048971049527263666,
"acc_norm": 0.4117647058823529,
"acc_norm_stderr": 0.048971049527263666
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.77,
"acc_stderr": 0.042295258468165065,
"acc_norm": 0.77,
"acc_norm_stderr": 0.042295258468165065
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5914893617021276,
"acc_stderr": 0.032134180267015755,
"acc_norm": 0.5914893617021276,
"acc_norm_stderr": 0.032134180267015755
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5175438596491229,
"acc_stderr": 0.04700708033551038,
"acc_norm": 0.5175438596491229,
"acc_norm_stderr": 0.04700708033551038
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5724137931034483,
"acc_stderr": 0.04122737111370333,
"acc_norm": 0.5724137931034483,
"acc_norm_stderr": 0.04122737111370333
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.41005291005291006,
"acc_stderr": 0.02533120243894444,
"acc_norm": 0.41005291005291006,
"acc_norm_stderr": 0.02533120243894444
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.4444444444444444,
"acc_stderr": 0.044444444444444495,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.044444444444444495
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7903225806451613,
"acc_stderr": 0.023157879349083525,
"acc_norm": 0.7903225806451613,
"acc_norm_stderr": 0.023157879349083525
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.5024630541871922,
"acc_stderr": 0.035179450386910616,
"acc_norm": 0.5024630541871922,
"acc_norm_stderr": 0.035179450386910616
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.7,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.7,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7696969696969697,
"acc_stderr": 0.0328766675860349,
"acc_norm": 0.7696969696969697,
"acc_norm_stderr": 0.0328766675860349
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7929292929292929,
"acc_stderr": 0.02886977846026705,
"acc_norm": 0.7929292929292929,
"acc_norm_stderr": 0.02886977846026705
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8808290155440415,
"acc_stderr": 0.02338193534812142,
"acc_norm": 0.8808290155440415,
"acc_norm_stderr": 0.02338193534812142
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6692307692307692,
"acc_stderr": 0.02385479568097112,
"acc_norm": 0.6692307692307692,
"acc_norm_stderr": 0.02385479568097112
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.3592592592592593,
"acc_stderr": 0.029252905927251972,
"acc_norm": 0.3592592592592593,
"acc_norm_stderr": 0.029252905927251972
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.7016806722689075,
"acc_stderr": 0.029719142876342856,
"acc_norm": 0.7016806722689075,
"acc_norm_stderr": 0.029719142876342856
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.3576158940397351,
"acc_stderr": 0.03913453431177258,
"acc_norm": 0.3576158940397351,
"acc_norm_stderr": 0.03913453431177258
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8330275229357799,
"acc_stderr": 0.01599015488507338,
"acc_norm": 0.8330275229357799,
"acc_norm_stderr": 0.01599015488507338
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5555555555555556,
"acc_stderr": 0.03388857118502325,
"acc_norm": 0.5555555555555556,
"acc_norm_stderr": 0.03388857118502325
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8333333333333334,
"acc_stderr": 0.02615686752393104,
"acc_norm": 0.8333333333333334,
"acc_norm_stderr": 0.02615686752393104
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7805907172995781,
"acc_stderr": 0.026939106581553945,
"acc_norm": 0.7805907172995781,
"acc_norm_stderr": 0.026939106581553945
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6816143497757847,
"acc_stderr": 0.03126580522513713,
"acc_norm": 0.6816143497757847,
"acc_norm_stderr": 0.03126580522513713
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7786259541984732,
"acc_stderr": 0.03641297081313728,
"acc_norm": 0.7786259541984732,
"acc_norm_stderr": 0.03641297081313728
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.768595041322314,
"acc_stderr": 0.03849856098794088,
"acc_norm": 0.768595041322314,
"acc_norm_stderr": 0.03849856098794088
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7592592592592593,
"acc_stderr": 0.04133119440243838,
"acc_norm": 0.7592592592592593,
"acc_norm_stderr": 0.04133119440243838
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7791411042944786,
"acc_stderr": 0.03259177392742178,
"acc_norm": 0.7791411042944786,
"acc_norm_stderr": 0.03259177392742178
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.45535714285714285,
"acc_stderr": 0.047268355537191,
"acc_norm": 0.45535714285714285,
"acc_norm_stderr": 0.047268355537191
},
"harness|hendrycksTest-management|5": {
"acc": 0.7766990291262136,
"acc_stderr": 0.04123553189891431,
"acc_norm": 0.7766990291262136,
"acc_norm_stderr": 0.04123553189891431
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8717948717948718,
"acc_stderr": 0.02190190511507333,
"acc_norm": 0.8717948717948718,
"acc_norm_stderr": 0.02190190511507333
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.72,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.72,
"acc_norm_stderr": 0.04512608598542128
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8237547892720306,
"acc_stderr": 0.013625556907993452,
"acc_norm": 0.8237547892720306,
"acc_norm_stderr": 0.013625556907993452
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7312138728323699,
"acc_stderr": 0.02386800326250011,
"acc_norm": 0.7312138728323699,
"acc_norm_stderr": 0.02386800326250011
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.43798882681564244,
"acc_stderr": 0.016593394227564843,
"acc_norm": 0.43798882681564244,
"acc_norm_stderr": 0.016593394227564843
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7189542483660131,
"acc_stderr": 0.025738854797818737,
"acc_norm": 0.7189542483660131,
"acc_norm_stderr": 0.025738854797818737
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7138263665594855,
"acc_stderr": 0.02567025924218893,
"acc_norm": 0.7138263665594855,
"acc_norm_stderr": 0.02567025924218893
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7376543209876543,
"acc_stderr": 0.02447722285613511,
"acc_norm": 0.7376543209876543,
"acc_norm_stderr": 0.02447722285613511
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.5,
"acc_stderr": 0.029827499313594685,
"acc_norm": 0.5,
"acc_norm_stderr": 0.029827499313594685
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4654498044328553,
"acc_stderr": 0.012739711554045702,
"acc_norm": 0.4654498044328553,
"acc_norm_stderr": 0.012739711554045702
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6764705882352942,
"acc_stderr": 0.02841820861940676,
"acc_norm": 0.6764705882352942,
"acc_norm_stderr": 0.02841820861940676
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6617647058823529,
"acc_stderr": 0.019139943748487046,
"acc_norm": 0.6617647058823529,
"acc_norm_stderr": 0.019139943748487046
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6727272727272727,
"acc_stderr": 0.0449429086625209,
"acc_norm": 0.6727272727272727,
"acc_norm_stderr": 0.0449429086625209
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.726530612244898,
"acc_stderr": 0.028535560337128445,
"acc_norm": 0.726530612244898,
"acc_norm_stderr": 0.028535560337128445
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8407960199004975,
"acc_stderr": 0.02587064676616913,
"acc_norm": 0.8407960199004975,
"acc_norm_stderr": 0.02587064676616913
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.85,
"acc_stderr": 0.03588702812826371,
"acc_norm": 0.85,
"acc_norm_stderr": 0.03588702812826371
},
"harness|hendrycksTest-virology|5": {
"acc": 0.536144578313253,
"acc_stderr": 0.038823108508905954,
"acc_norm": 0.536144578313253,
"acc_norm_stderr": 0.038823108508905954
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.847953216374269,
"acc_stderr": 0.027539122889061456,
"acc_norm": 0.847953216374269,
"acc_norm_stderr": 0.027539122889061456
},
"harness|truthfulqa:mc|0": {
"mc1": 0.4944920440636475,
"mc1_stderr": 0.01750243899045106,
"mc2": 0.6516576799205165,
"mc2_stderr": 0.01520679103207334
},
"harness|winogrande|5": {
"acc": 0.8113654301499605,
"acc_stderr": 0.010995172318019816
},
"harness|gsm8k|5": {
"acc": 0.6345716451857468,
"acc_stderr": 0.013264282030266633
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | open-llm-leaderboard/details_mayacinka__NeuralZephyr-Beagle-7B | [
"region:us"
] | 2024-02-17T00:57:39+00:00 | {"pretty_name": "Evaluation run of mayacinka/NeuralZephyr-Beagle-7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [mayacinka/NeuralZephyr-Beagle-7B](https://huggingface.co/mayacinka/NeuralZephyr-Beagle-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_mayacinka__NeuralZephyr-Beagle-7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-17T00:55:18.728023](https://huggingface.co/datasets/open-llm-leaderboard/details_mayacinka__NeuralZephyr-Beagle-7B/blob/main/results_2024-02-17T00-55-18.728023.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6499857973810573,\n \"acc_stderr\": 0.0322179748198474,\n \"acc_norm\": 0.651015316120668,\n \"acc_norm_stderr\": 0.032873289143278944,\n \"mc1\": 0.4944920440636475,\n \"mc1_stderr\": 0.01750243899045106,\n \"mc2\": 0.6516576799205165,\n \"mc2_stderr\": 0.01520679103207334\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6578498293515358,\n \"acc_stderr\": 0.013864152159177275,\n \"acc_norm\": 0.6860068259385665,\n \"acc_norm_stderr\": 0.013562691224726297\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6852220673172674,\n \"acc_stderr\": 0.004634782156128581,\n \"acc_norm\": 0.8637721569408484,\n \"acc_norm_stderr\": 0.0034232928816321498\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.047609522856952365,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.047609522856952365\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6444444444444445,\n \"acc_stderr\": 0.04135176749720385,\n \"acc_norm\": 0.6444444444444445,\n \"acc_norm_stderr\": 0.04135176749720385\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7039473684210527,\n \"acc_stderr\": 0.03715062154998904,\n \"acc_norm\": 0.7039473684210527,\n \"acc_norm_stderr\": 0.03715062154998904\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7169811320754716,\n \"acc_stderr\": 0.027724236492700918,\n \"acc_norm\": 0.7169811320754716,\n \"acc_norm_stderr\": 0.027724236492700918\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7361111111111112,\n \"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 0.7361111111111112,\n \"acc_norm_stderr\": 0.03685651095897532\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.49,\n 
\"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6820809248554913,\n \"acc_stderr\": 0.0355068398916558,\n \"acc_norm\": 0.6820809248554913,\n \"acc_norm_stderr\": 0.0355068398916558\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.048971049527263666,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.048971049527263666\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5914893617021276,\n \"acc_stderr\": 0.032134180267015755,\n \"acc_norm\": 0.5914893617021276,\n \"acc_norm_stderr\": 0.032134180267015755\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5175438596491229,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.5175438596491229,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.04122737111370333,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.04122737111370333\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41005291005291006,\n \"acc_stderr\": 0.02533120243894444,\n \"acc_norm\": 0.41005291005291006,\n \"acc_norm_stderr\": 0.02533120243894444\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.044444444444444495,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.044444444444444495\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7903225806451613,\n \"acc_stderr\": 0.023157879349083525,\n \"acc_norm\": 0.7903225806451613,\n \"acc_norm_stderr\": 0.023157879349083525\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.035179450386910616,\n \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.035179450386910616\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.0328766675860349,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.0328766675860349\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7929292929292929,\n \"acc_stderr\": 0.02886977846026705,\n \"acc_norm\": 0.7929292929292929,\n \"acc_norm_stderr\": 0.02886977846026705\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8808290155440415,\n \"acc_stderr\": 0.02338193534812142,\n \"acc_norm\": 0.8808290155440415,\n \"acc_norm_stderr\": 0.02338193534812142\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6692307692307692,\n \"acc_stderr\": 0.02385479568097112,\n \"acc_norm\": 0.6692307692307692,\n \"acc_norm_stderr\": 0.02385479568097112\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3592592592592593,\n \"acc_stderr\": 0.029252905927251972,\n \"acc_norm\": 0.3592592592592593,\n \"acc_norm_stderr\": 0.029252905927251972\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7016806722689075,\n \"acc_stderr\": 0.029719142876342856,\n \"acc_norm\": 0.7016806722689075,\n \"acc_norm_stderr\": 0.029719142876342856\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3576158940397351,\n \"acc_stderr\": 0.03913453431177258,\n \"acc_norm\": 0.3576158940397351,\n \"acc_norm_stderr\": 0.03913453431177258\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8330275229357799,\n \"acc_stderr\": 0.01599015488507338,\n \"acc_norm\": 0.8330275229357799,\n \"acc_norm_stderr\": 0.01599015488507338\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.03388857118502325,\n \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.03388857118502325\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.02615686752393104,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.02615686752393104\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7805907172995781,\n \"acc_stderr\": 0.026939106581553945,\n \"acc_norm\": 0.7805907172995781,\n \"acc_norm_stderr\": 0.026939106581553945\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6816143497757847,\n \"acc_stderr\": 0.03126580522513713,\n \"acc_norm\": 0.6816143497757847,\n \"acc_norm_stderr\": 0.03126580522513713\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7786259541984732,\n \"acc_stderr\": 0.03641297081313728,\n \"acc_norm\": 0.7786259541984732,\n \"acc_norm_stderr\": 0.03641297081313728\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.768595041322314,\n \"acc_stderr\": 0.03849856098794088,\n \"acc_norm\": 0.768595041322314,\n \"acc_norm_stderr\": 0.03849856098794088\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7592592592592593,\n \"acc_stderr\": 0.04133119440243838,\n \"acc_norm\": 0.7592592592592593,\n \"acc_norm_stderr\": 0.04133119440243838\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7791411042944786,\n \"acc_stderr\": 0.03259177392742178,\n \"acc_norm\": 0.7791411042944786,\n \"acc_norm_stderr\": 0.03259177392742178\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.45535714285714285,\n \"acc_stderr\": 0.047268355537191,\n \"acc_norm\": 0.45535714285714285,\n \"acc_norm_stderr\": 0.047268355537191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 0.02190190511507333,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.02190190511507333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8237547892720306,\n \"acc_stderr\": 0.013625556907993452,\n \"acc_norm\": 0.8237547892720306,\n \"acc_norm_stderr\": 0.013625556907993452\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7312138728323699,\n \"acc_stderr\": 0.02386800326250011,\n \"acc_norm\": 0.7312138728323699,\n \"acc_norm_stderr\": 0.02386800326250011\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.43798882681564244,\n \"acc_stderr\": 0.016593394227564843,\n \"acc_norm\": 0.43798882681564244,\n \"acc_norm_stderr\": 0.016593394227564843\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7189542483660131,\n \"acc_stderr\": 0.025738854797818737,\n \"acc_norm\": 0.7189542483660131,\n \"acc_norm_stderr\": 0.025738854797818737\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7138263665594855,\n \"acc_stderr\": 0.02567025924218893,\n \"acc_norm\": 0.7138263665594855,\n \"acc_norm_stderr\": 0.02567025924218893\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7376543209876543,\n \"acc_stderr\": 0.02447722285613511,\n \"acc_norm\": 0.7376543209876543,\n \"acc_norm_stderr\": 0.02447722285613511\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.029827499313594685,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.029827499313594685\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4654498044328553,\n \"acc_stderr\": 0.012739711554045702,\n \"acc_norm\": 0.4654498044328553,\n \"acc_norm_stderr\": 0.012739711554045702\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.02841820861940676,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.02841820861940676\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6617647058823529,\n \"acc_stderr\": 0.019139943748487046,\n \"acc_norm\": 0.6617647058823529,\n \"acc_norm_stderr\": 0.019139943748487046\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.726530612244898,\n \"acc_stderr\": 0.028535560337128445,\n \"acc_norm\": 0.726530612244898,\n \"acc_norm_stderr\": 0.028535560337128445\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.02587064676616913,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.02587064676616913\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.03588702812826371,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.03588702812826371\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.536144578313253,\n \"acc_stderr\": 0.038823108508905954,\n \"acc_norm\": 0.536144578313253,\n \"acc_norm_stderr\": 0.038823108508905954\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.847953216374269,\n \"acc_stderr\": 0.027539122889061456,\n \"acc_norm\": 0.847953216374269,\n \"acc_norm_stderr\": 0.027539122889061456\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4944920440636475,\n \"mc1_stderr\": 0.01750243899045106,\n \"mc2\": 0.6516576799205165,\n \"mc2_stderr\": 0.01520679103207334\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8113654301499605,\n \"acc_stderr\": 0.010995172318019816\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6345716451857468,\n \"acc_stderr\": 0.013264282030266633\n }\n}\n```", "repo_url": 
"https://huggingface.co/mayacinka/NeuralZephyr-Beagle-7B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "clementine@hf.co", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|arc:challenge|25_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|gsm8k|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hellaswag|10_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T00-55-18.728023.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T00-55-18.728023.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-17T00-55-18.728023.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-17T00-55-18.728023.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T00-55-18.728023.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T00-55-18.728023.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["**/details_harness|winogrande|5_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-17T00-55-18.728023.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_17T00_55_18.728023", "path": ["results_2024-02-17T00-55-18.728023.parquet"]}, {"split": "latest", "path": 
["results_2024-02-17T00-55-18.728023.parquet"]}]}]} | 2024-02-17T00:58:02+00:00 |
b14421d37453a34a3afebd404ab1db9a298ee7ec | rokkamaravind321/500-3k-dataset | [
"region:us"
] | 2024-02-17T01:09:46+00:00 | {} | 2024-02-17T01:10:11+00:00 |
|
2b8337f0850cd264be370a0bf498f98f365411d8 | ramixpe/ramixpe | [
"region:us"
] | 2024-02-17T01:39:06+00:00 | {"dataset_info": {"features": [{"name": "instruction", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 19183, "num_examples": 70}], "download_size": 8097, "dataset_size": 19183}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T01:39:08+00:00 |
|
467b05c3006eb9e87c69b3289b03a43f673b82a0 | bertram-gilfoyle/CC-MAIN-2023-14 | [
"region:us"
] | 2024-02-17T01:44:51+00:00 | {} | 2024-02-17T07:58:13+00:00 |
|
6a1d6951f523f6518474862899fd6daa6a9967c2 | ramixpe/fankosh | [
"region:us"
] | 2024-02-17T01:46:39+00:00 | {"dataset_info": {"features": [{"name": "instruction", "dtype": "string"}, {"name": "output", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 19183, "num_examples": 70}], "download_size": 8097, "dataset_size": 19183}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T01:46:42+00:00 |
|
1a6886aec0a8047e7a2164167c584e29a673a4c2 | steven1116/ninespecies_exclude_honeybee | [
"license:apache-2.0",
"region:us"
] | 2024-02-17T01:55:27+00:00 | {"license": "apache-2.0"} | 2024-02-17T01:55:27+00:00 |
|
9be201e809b81ed59c0fcd875d8138915d338d31 | hyperdemocracy/us-congress | [
"region:us"
] | 2024-02-17T01:56:03+00:00 | {"configs": [{"config_name": "billstatus_xml", "data_files": [{"split": "108", "path": "data/billstatus_xml/usc-108-billstatus-xml.parquet"}, {"split": "109", "path": "data/billstatus_xml/usc-109-billstatus-xml.parquet"}, {"split": "110", "path": "data/billstatus_xml/usc-110-billstatus-xml.parquet"}, {"split": "111", "path": "data/billstatus_xml/usc-111-billstatus-xml.parquet"}, {"split": "112", "path": "data/billstatus_xml/usc-112-billstatus-xml.parquet"}, {"split": "113", "path": "data/billstatus_xml/usc-113-billstatus-xml.parquet"}, {"split": "114", "path": "data/billstatus_xml/usc-114-billstatus-xml.parquet"}, {"split": "115", "path": "data/billstatus_xml/usc-115-billstatus-xml.parquet"}, {"split": "116", "path": "data/billstatus_xml/usc-116-billstatus-xml.parquet"}, {"split": "117", "path": "data/billstatus_xml/usc-117-billstatus-xml.parquet"}, {"split": "118", "path": "data/billstatus_xml/usc-118-billstatus-xml.parquet"}]}, {"config_name": "billstatus_parsed", "data_files": [{"split": "108", "path": "data/billstatus_parsed/usc-108-billstatus-parsed.parquet"}, {"split": "109", "path": "data/billstatus_parsed/usc-109-billstatus-parsed.parquet"}, {"split": "110", "path": "data/billstatus_parsed/usc-110-billstatus-parsed.parquet"}, {"split": "111", "path": "data/billstatus_parsed/usc-111-billstatus-parsed.parquet"}, {"split": "112", "path": "data/billstatus_parsed/usc-112-billstatus-parsed.parquet"}, {"split": "113", "path": "data/billstatus_parsed/usc-113-billstatus-parsed.parquet"}, {"split": "114", "path": "data/billstatus_parsed/usc-114-billstatus-parsed.parquet"}, {"split": "115", "path": "data/billstatus_parsed/usc-115-billstatus-parsed.parquet"}, {"split": "116", "path": "data/billstatus_parsed/usc-116-billstatus-parsed.parquet"}, {"split": "117", "path": "data/billstatus_parsed/usc-117-billstatus-parsed.parquet"}, {"split": "118", "path": "data/billstatus_parsed/usc-118-billstatus-parsed.parquet"}]}, {"config_name": "textversions_ddt_xml", "data_files": [{"split": "113", "path": "data/textversions_ddt_xml/usc-113-textversions-ddt-xml.parquet"}, {"split": "114", "path": "data/textversions_ddt_xml/usc-114-textversions-ddt-xml.parquet"}, {"split": "115", "path": "data/textversions_ddt_xml/usc-115-textversions-ddt-xml.parquet"}, {"split": "116", "path": "data/textversions_ddt_xml/usc-116-textversions-ddt-xml.parquet"}, {"split": "117", "path": "data/textversions_ddt_xml/usc-117-textversions-ddt-xml.parquet"}, {"split": "118", "path": "data/textversions_ddt_xml/usc-118-textversions-ddt-xml.parquet"}]}, {"config_name": "textversions_uslm_xml", "data_files": [{"split": "113", "path": "data/textversions_uslm_xml/usc-113-textversions-uslm-xml.parquet"}, {"split": "114", "path": "data/textversions_uslm_xml/usc-114-textversions-uslm-xml.parquet"}, {"split": "115", "path": "data/textversions_uslm_xml/usc-115-textversions-uslm-xml.parquet"}, {"split": "116", "path": "data/textversions_uslm_xml/usc-116-textversions-uslm-xml.parquet"}, {"split": "117", "path": "data/textversions_uslm_xml/usc-117-textversions-uslm-xml.parquet"}, {"split": "118", "path": "data/textversions_uslm_xml/usc-118-textversions-uslm-xml.parquet"}]}, {"config_name": "unified_v1", "data_files": [{"split": "113", "path": "data/unified_v1/usc-113-unified-v1.parquet"}, {"split": "114", "path": "data/unified_v1/usc-114-unified-v1.parquet"}, {"split": "115", "path": "data/unified_v1/usc-115-unified-v1.parquet"}, {"split": "116", "path": "data/unified_v1/usc-116-unified-v1.parquet"}, {"split": 
"117", "path": "data/unified_v1/usc-117-unified-v1.parquet"}, {"split": "118", "path": "data/unified_v1/usc-118-unified-v1.parquet"}]}, {"config_name": "chunks_v1_s1024_o256", "data_files": [{"split": "113", "path": "data/chunks_v1_s1024_o256/usc-113-chunks-v1-s1024-o256.parquet"}, {"split": "114", "path": "data/chunks_v1_s1024_o256/usc-114-chunks-v1-s1024-o256.parquet"}, {"split": "115", "path": "data/chunks_v1_s1024_o256/usc-115-chunks-v1-s1024-o256.parquet"}, {"split": "116", "path": "data/chunks_v1_s1024_o256/usc-116-chunks-v1-s1024-o256.parquet"}, {"split": "117", "path": "data/chunks_v1_s1024_o256/usc-117-chunks-v1-s1024-o256.parquet"}, {"split": "118", "path": "data/chunks_v1_s1024_o256/usc-118-chunks-v1-s1024-o256.parquet"}]}, {"config_name": "chunks_v1_s2048_o256", "data_files": [{"split": "113", "path": "data/chunks_v1_s2048_o256/usc-113-chunks-v1-s2048-o256.parquet"}, {"split": "114", "path": "data/chunks_v1_s2048_o256/usc-114-chunks-v1-s2048-o256.parquet"}, {"split": "115", "path": "data/chunks_v1_s2048_o256/usc-115-chunks-v1-s2048-o256.parquet"}, {"split": "116", "path": "data/chunks_v1_s2048_o256/usc-116-chunks-v1-s2048-o256.parquet"}, {"split": "117", "path": "data/chunks_v1_s2048_o256/usc-117-chunks-v1-s2048-o256.parquet"}, {"split": "118", "path": "data/chunks_v1_s2048_o256/usc-118-chunks-v1-s2048-o256.parquet"}]}, {"config_name": "chunks_v1_s4096_o512", "data_files": [{"split": "113", "path": "data/chunks_v1_s4096_o512/usc-113-chunks-v1-s4096-o512.parquet"}, {"split": "114", "path": "data/chunks_v1_s4096_o512/usc-114-chunks-v1-s4096-o512.parquet"}, {"split": "115", "path": "data/chunks_v1_s4096_o512/usc-115-chunks-v1-s4096-o512.parquet"}, {"split": "116", "path": "data/chunks_v1_s4096_o512/usc-116-chunks-v1-s4096-o512.parquet"}, {"split": "117", "path": "data/chunks_v1_s4096_o512/usc-117-chunks-v1-s4096-o512.parquet"}, {"split": "118", "path": "data/chunks_v1_s4096_o512/usc-118-chunks-v1-s4096-o512.parquet"}]}, {"config_name": "chunks_v1_s8192_o512", "data_files": [{"split": "113", "path": "data/chunks_v1_s8192_o512/usc-113-chunks-v1-s8192-o512.parquet"}, {"split": "114", "path": "data/chunks_v1_s8192_o512/usc-114-chunks-v1-s8192-o512.parquet"}, {"split": "115", "path": "data/chunks_v1_s8192_o512/usc-115-chunks-v1-s8192-o512.parquet"}, {"split": "116", "path": "data/chunks_v1_s8192_o512/usc-116-chunks-v1-s8192-o512.parquet"}, {"split": "117", "path": "data/chunks_v1_s8192_o512/usc-117-chunks-v1-s8192-o512.parquet"}, {"split": "118", "path": "data/chunks_v1_s8192_o512/usc-118-chunks-v1-s8192-o512.parquet"}]}]} | 2024-02-17T16:29:06+00:00 |
|
8a70b675e938957a3e6e9f83791f1a735659c8e0 | badokorach/EnglishTestData | [
"region:us"
] | 2024-02-17T02:01:18+00:00 | {} | 2024-02-17T02:59:40+00:00 |
|
03201863bfdb5c9f6a549e7afb1b737ef1ac61c9 | cmcmaster/rheumatology-dpo-binarized | [
"region:us"
] | 2024-02-17T02:16:36+00:00 | {"dataset_info": {"features": [{"name": "chosen", "sequence": "string"}, {"name": "rejected", "sequence": "string"}], "splits": [{"name": "train", "num_bytes": 4401757, "num_examples": 696}], "download_size": 2111818, "dataset_size": 4401757}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T02:16:40+00:00 |
|
5559e4e9f12c692f66ae7093bff96b8875f0879a | cmcmaster/rheumatology-sft-full | [
"region:us"
] | 2024-02-17T02:17:04+00:00 | {"dataset_info": {"features": [{"name": "messages", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}], "splits": [{"name": "train", "num_bytes": 5596479, "num_examples": 6378}, {"name": "test", "num_bytes": 400258, "num_examples": 500}], "download_size": 3343739, "dataset_size": 5996737}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2024-02-17T02:26:09+00:00 |
|
4a4ed3b7542dc5bc6d3fc6f3335c4a31e8f6d990 | maff91/aes_sedai_assistant | [
"license:mit",
"region:us"
] | 2024-02-17T02:23:59+00:00 | {"license": "mit"} | 2024-02-17T02:50:59+00:00 |
|
3b14c002f73c1b5f5bd61b6b1a37c89b3cc3ec67 | Dialogue rewriting dataset, roughly 1.6 million examples in total.
File format: jsonl
Single-line example (a minimal loading sketch follows this record):
```
{"dialogue": ["仁波: 我记得古代的金属工艺很厉害啊,那个啥,锻打技术超前的。", "冯艳婷: 没错,像那个景泰蓝,多美啊。"], "last_utterance_rewrite": "冯艳婷: 确实,例如景泰蓝这种金属工艺品,它的外观非常美丽。"}
``` | infgrad/dialogue_rewrite_llm | [
"license:mit",
"region:us"
] | 2024-02-17T02:29:18+00:00 | {"license": "mit"} | 2024-02-17T02:33:52+00:00 |
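The record above describes a jsonl file whose rows carry a `dialogue` list and a `last_utterance_rewrite` string. A minimal Python sketch for iterating over that format; the local file name `dialogue_rewrite.jsonl` is an assumption and may differ from the actual data file in the infgrad/dialogue_rewrite_llm repo:

```python
import json

# Assumed local file name; replace with the actual jsonl file from the repo.
with open("dialogue_rewrite.jsonl", encoding="utf-8") as f:
    for line in f:
        record = json.loads(line)
        dialogue = record["dialogue"]               # list of prior utterances
        rewrite = record["last_utterance_rewrite"]  # rewritten final utterance
        print(len(dialogue), rewrite)
```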
5d63c037c43f353fb0709efd46ba76e1e7652f6d | bubl-ai/story_with_synthetic_test_set | [
"license:mit",
"region:us"
] | 2024-02-17T02:31:56+00:00 | {"license": "mit"} | 2024-02-17T02:41:16+00:00 |
|
e7128ecc002ca6a0aa7605182fed935e5582ec30 | YiyangAiLab/POVID_preference_data_for_VLLMs | [
"license:cc-by-4.0",
"region:us"
] | 2024-02-17T02:53:49+00:00 | {"license": "cc-by-4.0"} | 2024-02-17T04:39:51+00:00 |
|
b8e0ecfdc8e0175a99b997c1237a9b936133ce5f |
# Dataset Card for Evaluation run of ConvexAI/Luminex-34B-v0.1
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [ConvexAI/Luminex-34B-v0.1](https://huggingface.co/ConvexAI/Luminex-34B-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_ConvexAI__Luminex-34B-v0.1",
"harness_winogrande_5",
split="train")
```
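The snippet above pulls the per-task details. The same repository also exposes the aggregated scores of a run through the "results" configuration listed in this card's metadata; a minimal sketch, assuming the standard `datasets` API and the "latest" split alias:

```python
from datasets import load_dataset

# Aggregated metrics of the most recent run; "results" and "latest" are the
# config/split names declared in this card's configuration list.
results = load_dataset(
    "open-llm-leaderboard/details_ConvexAI__Luminex-34B-v0.1",
    "results",
    split="latest",
)
print(results[0])  # one row holding the aggregated accuracy/stderr fields
```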
## Latest results
These are the [latest results from run 2024-02-17T02:55:24.187790](https://huggingface.co/datasets/open-llm-leaderboard/details_ConvexAI__Luminex-34B-v0.1/blob/main/results_2024-02-17T02-55-24.187790.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.763525493697814,
"acc_stderr": 0.02835769353284993,
"acc_norm": 0.7666743377682688,
"acc_norm_stderr": 0.02890561343087252,
"mc1": 0.5250917992656059,
"mc1_stderr": 0.01748144680410401,
"mc2": 0.6967876357426273,
"mc2_stderr": 0.014243776412915276
},
"harness|arc:challenge|25": {
"acc": 0.712457337883959,
"acc_stderr": 0.013226719056266129,
"acc_norm": 0.7363481228668942,
"acc_norm_stderr": 0.012875929151297061
},
"harness|hellaswag|10": {
"acc": 0.6719776936865166,
"acc_stderr": 0.00468533484403866,
"acc_norm": 0.8658633738299144,
"acc_norm_stderr": 0.0034010255178737255
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620333,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620333
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.7407407407407407,
"acc_stderr": 0.03785714465066654,
"acc_norm": 0.7407407407407407,
"acc_norm_stderr": 0.03785714465066654
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.9013157894736842,
"acc_stderr": 0.024270227737522715,
"acc_norm": 0.9013157894736842,
"acc_norm_stderr": 0.024270227737522715
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.74,
"acc_stderr": 0.04408440022768079,
"acc_norm": 0.74,
"acc_norm_stderr": 0.04408440022768079
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.8226415094339623,
"acc_stderr": 0.023508739218846945,
"acc_norm": 0.8226415094339623,
"acc_norm_stderr": 0.023508739218846945
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.9027777777777778,
"acc_stderr": 0.024774516250440182,
"acc_norm": 0.9027777777777778,
"acc_norm_stderr": 0.024774516250440182
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.56,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.56,
"acc_norm_stderr": 0.04988876515698589
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001975
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.45,
"acc_stderr": 0.04999999999999998,
"acc_norm": 0.45,
"acc_norm_stderr": 0.04999999999999998
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.7398843930635838,
"acc_stderr": 0.033450369167889904,
"acc_norm": 0.7398843930635838,
"acc_norm_stderr": 0.033450369167889904
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.5098039215686274,
"acc_stderr": 0.04974229460422817,
"acc_norm": 0.5098039215686274,
"acc_norm_stderr": 0.04974229460422817
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.79,
"acc_stderr": 0.04093601807403326,
"acc_norm": 0.79,
"acc_norm_stderr": 0.04093601807403326
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.7659574468085106,
"acc_stderr": 0.02767845257821239,
"acc_norm": 0.7659574468085106,
"acc_norm_stderr": 0.02767845257821239
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5877192982456141,
"acc_stderr": 0.04630653203366596,
"acc_norm": 0.5877192982456141,
"acc_norm_stderr": 0.04630653203366596
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.7517241379310344,
"acc_stderr": 0.036001056927277696,
"acc_norm": 0.7517241379310344,
"acc_norm_stderr": 0.036001056927277696
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.7380952380952381,
"acc_stderr": 0.022644212615525214,
"acc_norm": 0.7380952380952381,
"acc_norm_stderr": 0.022644212615525214
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.5476190476190477,
"acc_stderr": 0.044518079590553275,
"acc_norm": 0.5476190476190477,
"acc_norm_stderr": 0.044518079590553275
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001975
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.9032258064516129,
"acc_stderr": 0.016818943416345197,
"acc_norm": 0.9032258064516129,
"acc_norm_stderr": 0.016818943416345197
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.6748768472906403,
"acc_stderr": 0.03295797566311271,
"acc_norm": 0.6748768472906403,
"acc_norm_stderr": 0.03295797566311271
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.78,
"acc_stderr": 0.04163331998932261,
"acc_norm": 0.78,
"acc_norm_stderr": 0.04163331998932261
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.8606060606060606,
"acc_stderr": 0.027045948825865394,
"acc_norm": 0.8606060606060606,
"acc_norm_stderr": 0.027045948825865394
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.9292929292929293,
"acc_stderr": 0.01826310542019949,
"acc_norm": 0.9292929292929293,
"acc_norm_stderr": 0.01826310542019949
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.9740932642487047,
"acc_stderr": 0.01146452335695315,
"acc_norm": 0.9740932642487047,
"acc_norm_stderr": 0.01146452335695315
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.8076923076923077,
"acc_stderr": 0.019982347208637306,
"acc_norm": 0.8076923076923077,
"acc_norm_stderr": 0.019982347208637306
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.45925925925925926,
"acc_stderr": 0.03038416923235083,
"acc_norm": 0.45925925925925926,
"acc_norm_stderr": 0.03038416923235083
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.8529411764705882,
"acc_stderr": 0.02300545944667394,
"acc_norm": 0.8529411764705882,
"acc_norm_stderr": 0.02300545944667394
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.5364238410596026,
"acc_stderr": 0.04071636065944216,
"acc_norm": 0.5364238410596026,
"acc_norm_stderr": 0.04071636065944216
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.9192660550458716,
"acc_stderr": 0.011680172292862083,
"acc_norm": 0.9192660550458716,
"acc_norm_stderr": 0.011680172292862083
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.6712962962962963,
"acc_stderr": 0.032036140846700596,
"acc_norm": 0.6712962962962963,
"acc_norm_stderr": 0.032036140846700596
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.9313725490196079,
"acc_stderr": 0.017744453647073322,
"acc_norm": 0.9313725490196079,
"acc_norm_stderr": 0.017744453647073322
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.9113924050632911,
"acc_stderr": 0.018498315206865384,
"acc_norm": 0.9113924050632911,
"acc_norm_stderr": 0.018498315206865384
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.8026905829596412,
"acc_stderr": 0.02670985334496796,
"acc_norm": 0.8026905829596412,
"acc_norm_stderr": 0.02670985334496796
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.8778625954198473,
"acc_stderr": 0.028718776889342344,
"acc_norm": 0.8778625954198473,
"acc_norm_stderr": 0.028718776889342344
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.8842975206611571,
"acc_stderr": 0.029199802455622804,
"acc_norm": 0.8842975206611571,
"acc_norm_stderr": 0.029199802455622804
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.8518518518518519,
"acc_stderr": 0.03434300243630999,
"acc_norm": 0.8518518518518519,
"acc_norm_stderr": 0.03434300243630999
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.8404907975460123,
"acc_stderr": 0.02876748172598386,
"acc_norm": 0.8404907975460123,
"acc_norm_stderr": 0.02876748172598386
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.5625,
"acc_stderr": 0.04708567521880525,
"acc_norm": 0.5625,
"acc_norm_stderr": 0.04708567521880525
},
"harness|hendrycksTest-management|5": {
"acc": 0.8349514563106796,
"acc_stderr": 0.03675668832233188,
"acc_norm": 0.8349514563106796,
"acc_norm_stderr": 0.03675668832233188
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.9444444444444444,
"acc_stderr": 0.01500631280644693,
"acc_norm": 0.9444444444444444,
"acc_norm_stderr": 0.01500631280644693
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.91,
"acc_stderr": 0.028762349126466143,
"acc_norm": 0.91,
"acc_norm_stderr": 0.028762349126466143
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.9106002554278416,
"acc_stderr": 0.0102030178476883,
"acc_norm": 0.9106002554278416,
"acc_norm_stderr": 0.0102030178476883
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.8208092485549133,
"acc_stderr": 0.020647590029679332,
"acc_norm": 0.8208092485549133,
"acc_norm_stderr": 0.020647590029679332
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.7631284916201118,
"acc_stderr": 0.014219570788103986,
"acc_norm": 0.7631284916201118,
"acc_norm_stderr": 0.014219570788103986
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.8496732026143791,
"acc_stderr": 0.020464175124332625,
"acc_norm": 0.8496732026143791,
"acc_norm_stderr": 0.020464175124332625
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.8006430868167203,
"acc_stderr": 0.022691033780549656,
"acc_norm": 0.8006430868167203,
"acc_norm_stderr": 0.022691033780549656
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.8765432098765432,
"acc_stderr": 0.01830386880689179,
"acc_norm": 0.8765432098765432,
"acc_norm_stderr": 0.01830386880689179
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.6205673758865248,
"acc_stderr": 0.028947338851614098,
"acc_norm": 0.6205673758865248,
"acc_norm_stderr": 0.028947338851614098
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.5925684485006519,
"acc_stderr": 0.012549473714212224,
"acc_norm": 0.5925684485006519,
"acc_norm_stderr": 0.012549473714212224
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.8161764705882353,
"acc_stderr": 0.02352924218519311,
"acc_norm": 0.8161764705882353,
"acc_norm_stderr": 0.02352924218519311
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.8186274509803921,
"acc_stderr": 0.01558864349537047,
"acc_norm": 0.8186274509803921,
"acc_norm_stderr": 0.01558864349537047
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.7181818181818181,
"acc_stderr": 0.04309118709946458,
"acc_norm": 0.7181818181818181,
"acc_norm_stderr": 0.04309118709946458
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.8408163265306122,
"acc_stderr": 0.02342097206916635,
"acc_norm": 0.8408163265306122,
"acc_norm_stderr": 0.02342097206916635
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.900497512437811,
"acc_stderr": 0.021166216304659407,
"acc_norm": 0.900497512437811,
"acc_norm_stderr": 0.021166216304659407
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.91,
"acc_stderr": 0.02876234912646613,
"acc_norm": 0.91,
"acc_norm_stderr": 0.02876234912646613
},
"harness|hendrycksTest-virology|5": {
"acc": 0.572289156626506,
"acc_stderr": 0.038515976837185335,
"acc_norm": 0.572289156626506,
"acc_norm_stderr": 0.038515976837185335
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8713450292397661,
"acc_stderr": 0.025679342723276908,
"acc_norm": 0.8713450292397661,
"acc_norm_stderr": 0.025679342723276908
},
"harness|truthfulqa:mc|0": {
"mc1": 0.5250917992656059,
"mc1_stderr": 0.01748144680410401,
"mc2": 0.6967876357426273,
"mc2_stderr": 0.014243776412915276
},
"harness|winogrande|5": {
"acc": 0.8342541436464088,
"acc_stderr": 0.010450899545370616
},
"harness|gsm8k|5": {
"acc": 0.7247915087187263,
"acc_stderr": 0.012302114305862656
}
}
```
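The per-task entries above can also be aggregated programmatically. The snippet below is a minimal sketch, assuming the JSON excerpt has been saved locally as `results.json` (an illustrative filename, not part of this repository's layout); it averages the accuracy over the MMLU (`hendrycksTest`) subjects:

```python
import json
from statistics import mean

# Load the results excerpt shown above (the local filename is an assumption).
with open("results.json") as f:
    results = json.load(f)

# Collect the per-subject accuracies for the MMLU ("hendrycksTest") tasks
# and report their unweighted mean.
mmlu_accs = [
    scores["acc"]
    for task, scores in results.items()
    if task.startswith("harness|hendrycksTest-")
]
print(f"{len(mmlu_accs)} MMLU subjects, mean acc = {mean(mmlu_accs):.4f}")
```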
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | open-llm-leaderboard/details_ConvexAI__Luminex-34B-v0.1 | [
"region:us"
] | 2024-02-17T02:57:38+00:00 | {"pretty_name": "Evaluation run of ConvexAI/Luminex-34B-v0.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [ConvexAI/Luminex-34B-v0.1](https://huggingface.co/ConvexAI/Luminex-34B-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ConvexAI__Luminex-34B-v0.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-17T02:55:24.187790](https://huggingface.co/datasets/open-llm-leaderboard/details_ConvexAI__Luminex-34B-v0.1/blob/main/results_2024-02-17T02-55-24.187790.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.763525493697814,\n \"acc_stderr\": 0.02835769353284993,\n \"acc_norm\": 0.7666743377682688,\n \"acc_norm_stderr\": 0.02890561343087252,\n \"mc1\": 0.5250917992656059,\n \"mc1_stderr\": 0.01748144680410401,\n \"mc2\": 0.6967876357426273,\n \"mc2_stderr\": 0.014243776412915276\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.712457337883959,\n \"acc_stderr\": 0.013226719056266129,\n \"acc_norm\": 0.7363481228668942,\n \"acc_norm_stderr\": 0.012875929151297061\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6719776936865166,\n \"acc_stderr\": 0.00468533484403866,\n \"acc_norm\": 0.8658633738299144,\n \"acc_norm_stderr\": 0.0034010255178737255\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.03785714465066654,\n \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.03785714465066654\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.9013157894736842,\n \"acc_stderr\": 0.024270227737522715,\n \"acc_norm\": 0.9013157894736842,\n \"acc_norm_stderr\": 0.024270227737522715\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768079\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.8226415094339623,\n \"acc_stderr\": 0.023508739218846945,\n \"acc_norm\": 0.8226415094339623,\n \"acc_norm_stderr\": 0.023508739218846945\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.9027777777777778,\n \"acc_stderr\": 0.024774516250440182,\n \"acc_norm\": 0.9027777777777778,\n \"acc_norm_stderr\": 0.024774516250440182\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 
0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.04999999999999998,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.04999999999999998\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7398843930635838,\n \"acc_stderr\": 0.033450369167889904,\n \"acc_norm\": 0.7398843930635838,\n \"acc_norm_stderr\": 0.033450369167889904\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.5098039215686274,\n \"acc_stderr\": 0.04974229460422817,\n \"acc_norm\": 0.5098039215686274,\n \"acc_norm_stderr\": 0.04974229460422817\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.04093601807403326,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.04093601807403326\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.7659574468085106,\n \"acc_stderr\": 0.02767845257821239,\n \"acc_norm\": 0.7659574468085106,\n \"acc_norm_stderr\": 0.02767845257821239\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5877192982456141,\n \"acc_stderr\": 0.04630653203366596,\n \"acc_norm\": 0.5877192982456141,\n \"acc_norm_stderr\": 0.04630653203366596\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.7517241379310344,\n \"acc_stderr\": 0.036001056927277696,\n \"acc_norm\": 0.7517241379310344,\n \"acc_norm_stderr\": 0.036001056927277696\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.7380952380952381,\n \"acc_stderr\": 0.022644212615525214,\n \"acc_norm\": 0.7380952380952381,\n \"acc_norm_stderr\": 0.022644212615525214\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5476190476190477,\n \"acc_stderr\": 0.044518079590553275,\n \"acc_norm\": 0.5476190476190477,\n \"acc_norm_stderr\": 0.044518079590553275\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.9032258064516129,\n \"acc_stderr\": 0.016818943416345197,\n \"acc_norm\": 0.9032258064516129,\n \"acc_norm_stderr\": 0.016818943416345197\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.6748768472906403,\n \"acc_stderr\": 0.03295797566311271,\n \"acc_norm\": 0.6748768472906403,\n \"acc_norm_stderr\": 0.03295797566311271\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932261,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932261\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8606060606060606,\n \"acc_stderr\": 0.027045948825865394,\n \"acc_norm\": 0.8606060606060606,\n \"acc_norm_stderr\": 0.027045948825865394\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.9292929292929293,\n \"acc_stderr\": 0.01826310542019949,\n \"acc_norm\": 0.9292929292929293,\n \"acc_norm_stderr\": 0.01826310542019949\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9740932642487047,\n \"acc_stderr\": 0.01146452335695315,\n \"acc_norm\": 0.9740932642487047,\n \"acc_norm_stderr\": 0.01146452335695315\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.8076923076923077,\n \"acc_stderr\": 0.019982347208637306,\n \"acc_norm\": 0.8076923076923077,\n \"acc_norm_stderr\": 0.019982347208637306\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.45925925925925926,\n \"acc_stderr\": 0.03038416923235083,\n \"acc_norm\": 0.45925925925925926,\n \"acc_norm_stderr\": 0.03038416923235083\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.8529411764705882,\n \"acc_stderr\": 0.02300545944667394,\n \"acc_norm\": 0.8529411764705882,\n \"acc_norm_stderr\": 0.02300545944667394\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.5364238410596026,\n \"acc_stderr\": 0.04071636065944216,\n \"acc_norm\": 0.5364238410596026,\n \"acc_norm_stderr\": 0.04071636065944216\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.9192660550458716,\n \"acc_stderr\": 0.011680172292862083,\n \"acc_norm\": 0.9192660550458716,\n \"acc_norm_stderr\": 0.011680172292862083\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6712962962962963,\n \"acc_stderr\": 0.032036140846700596,\n \"acc_norm\": 0.6712962962962963,\n \"acc_norm_stderr\": 0.032036140846700596\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9313725490196079,\n \"acc_stderr\": 0.017744453647073322,\n \"acc_norm\": 0.9313725490196079,\n \"acc_norm_stderr\": 0.017744453647073322\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.9113924050632911,\n \"acc_stderr\": 0.018498315206865384,\n \"acc_norm\": 0.9113924050632911,\n \"acc_norm_stderr\": 0.018498315206865384\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.8026905829596412,\n \"acc_stderr\": 0.02670985334496796,\n \"acc_norm\": 0.8026905829596412,\n \"acc_norm_stderr\": 0.02670985334496796\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8778625954198473,\n \"acc_stderr\": 0.028718776889342344,\n \"acc_norm\": 0.8778625954198473,\n \"acc_norm_stderr\": 0.028718776889342344\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8842975206611571,\n \"acc_stderr\": 0.029199802455622804,\n \"acc_norm\": 0.8842975206611571,\n \"acc_norm_stderr\": 0.029199802455622804\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8518518518518519,\n \"acc_stderr\": 0.03434300243630999,\n \"acc_norm\": 0.8518518518518519,\n \"acc_norm_stderr\": 0.03434300243630999\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.8404907975460123,\n \"acc_stderr\": 0.02876748172598386,\n \"acc_norm\": 0.8404907975460123,\n \"acc_norm_stderr\": 0.02876748172598386\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5625,\n \"acc_stderr\": 0.04708567521880525,\n \"acc_norm\": 0.5625,\n \"acc_norm_stderr\": 0.04708567521880525\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8349514563106796,\n \"acc_stderr\": 0.03675668832233188,\n \"acc_norm\": 0.8349514563106796,\n \"acc_norm_stderr\": 0.03675668832233188\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9444444444444444,\n \"acc_stderr\": 0.01500631280644693,\n \"acc_norm\": 0.9444444444444444,\n \"acc_norm_stderr\": 0.01500631280644693\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.91,\n \"acc_stderr\": 0.028762349126466143,\n \"acc_norm\": 0.91,\n \"acc_norm_stderr\": 0.028762349126466143\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.9106002554278416,\n 
\"acc_stderr\": 0.0102030178476883,\n \"acc_norm\": 0.9106002554278416,\n \"acc_norm_stderr\": 0.0102030178476883\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.8208092485549133,\n \"acc_stderr\": 0.020647590029679332,\n \"acc_norm\": 0.8208092485549133,\n \"acc_norm_stderr\": 0.020647590029679332\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.7631284916201118,\n \"acc_stderr\": 0.014219570788103986,\n \"acc_norm\": 0.7631284916201118,\n \"acc_norm_stderr\": 0.014219570788103986\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.8496732026143791,\n \"acc_stderr\": 0.020464175124332625,\n \"acc_norm\": 0.8496732026143791,\n \"acc_norm_stderr\": 0.020464175124332625\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.8006430868167203,\n \"acc_stderr\": 0.022691033780549656,\n \"acc_norm\": 0.8006430868167203,\n \"acc_norm_stderr\": 0.022691033780549656\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8765432098765432,\n \"acc_stderr\": 0.01830386880689179,\n \"acc_norm\": 0.8765432098765432,\n \"acc_norm_stderr\": 0.01830386880689179\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.6205673758865248,\n \"acc_stderr\": 0.028947338851614098,\n \"acc_norm\": 0.6205673758865248,\n \"acc_norm_stderr\": 0.028947338851614098\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5925684485006519,\n \"acc_stderr\": 0.012549473714212224,\n \"acc_norm\": 0.5925684485006519,\n \"acc_norm_stderr\": 0.012549473714212224\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.8161764705882353,\n \"acc_stderr\": 0.02352924218519311,\n \"acc_norm\": 0.8161764705882353,\n \"acc_norm_stderr\": 0.02352924218519311\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.8186274509803921,\n \"acc_stderr\": 0.01558864349537047,\n \"acc_norm\": 0.8186274509803921,\n \"acc_norm_stderr\": 0.01558864349537047\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7181818181818181,\n \"acc_stderr\": 0.04309118709946458,\n \"acc_norm\": 0.7181818181818181,\n \"acc_norm_stderr\": 0.04309118709946458\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8408163265306122,\n \"acc_stderr\": 0.02342097206916635,\n \"acc_norm\": 0.8408163265306122,\n \"acc_norm_stderr\": 0.02342097206916635\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.900497512437811,\n \"acc_stderr\": 0.021166216304659407,\n \"acc_norm\": 0.900497512437811,\n \"acc_norm_stderr\": 0.021166216304659407\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.91,\n \"acc_stderr\": 0.02876234912646613,\n \"acc_norm\": 0.91,\n \"acc_norm_stderr\": 0.02876234912646613\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.572289156626506,\n \"acc_stderr\": 0.038515976837185335,\n \"acc_norm\": 0.572289156626506,\n \"acc_norm_stderr\": 0.038515976837185335\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8713450292397661,\n \"acc_stderr\": 0.025679342723276908,\n \"acc_norm\": 0.8713450292397661,\n \"acc_norm_stderr\": 0.025679342723276908\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5250917992656059,\n \"mc1_stderr\": 0.01748144680410401,\n \"mc2\": 0.6967876357426273,\n \"mc2_stderr\": 0.014243776412915276\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8342541436464088,\n \"acc_stderr\": 0.010450899545370616\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7247915087187263,\n \"acc_stderr\": 0.012302114305862656\n }\n}\n```", "repo_url": 
"https://huggingface.co/ConvexAI/Luminex-34B-v0.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "clementine@hf.co", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|arc:challenge|25_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|gsm8k|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hellaswag|10_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T02-55-24.187790.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T02-55-24.187790.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-17T02-55-24.187790.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-17T02-55-24.187790.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T02-55-24.187790.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T02-55-24.187790.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["**/details_harness|winogrande|5_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-17T02-55-24.187790.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_17T02_55_24.187790", "path": ["results_2024-02-17T02-55-24.187790.parquet"]}, {"split": "latest", "path": 
["results_2024-02-17T02-55-24.187790.parquet"]}]}]} | 2024-02-17T02:58:13+00:00 |
d0b8d37d0e9d135af100d49cba004ad6c5034008 | Yourmom123/GamerscapeLab | [
"license:apache-2.0",
"region:us"
] | 2024-02-17T03:17:05+00:00 | {"license": "apache-2.0"} | 2024-02-17T03:18:12+00:00 |
|
81292271fe2b086c16290292dc7062e91d933fe0 | adambjorn/UnrelatedForgettingOverhead | [
"license:openrail",
"region:us"
] | 2024-02-17T03:20:16+00:00 | {"license": "openrail", "dataset_info": [{"config_name": "dialogsum", "features": [{"name": "sentence1", "dtype": "string"}, {"name": "sentence2", "dtype": "string"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "not_equivalent", "1": "equivalent"}}}}, {"name": "idx", "dtype": "int32"}], "splits": [{"name": "train", "num_bytes": 52320, "num_examples": 200}, {"name": "test", "num_bytes": 19167, "num_examples": 75}], "download_size": 58432, "dataset_size": 71487}, {"config_name": "glue-mrpc", "features": [{"name": "sentence1", "dtype": "string"}, {"name": "sentence2", "dtype": "string"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "not_equivalent", "1": "equivalent"}}}}, {"name": "idx", "dtype": "int32"}], "splits": [{"name": "train", "num_bytes": 52320, "num_examples": 200}, {"name": "test", "num_bytes": 19167, "num_examples": 75}], "download_size": 58432, "dataset_size": 71487}, {"config_name": "ropes", "features": [{"name": "id", "dtype": "string"}, {"name": "background", "dtype": "string"}, {"name": "situation", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "answers", "sequence": [{"name": "text", "dtype": "string"}]}], "splits": [{"name": "train", "num_bytes": 219720, "num_examples": 200}, {"name": "test", "num_bytes": 80161, "num_examples": 75}], "download_size": 166639, "dataset_size": 299881}, {"config_name": "winograd_wsc", "features": [{"name": "text", "dtype": "string"}, {"name": "pronoun", "dtype": "string"}, {"name": "pronoun_loc", "dtype": "int32"}, {"name": "quote", "dtype": "string"}, {"name": "quote_loc", "dtype": "int32"}, {"name": "options", "sequence": "string"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "0", "1": "1"}}}}, {"name": "source", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 37288, "num_examples": 200}, {"name": "test", "num_bytes": 13852, "num_examples": 75}], "download_size": 36050, "dataset_size": 51140}], "configs": [{"config_name": "dialogsum", "data_files": [{"split": "train", "path": "dialogsum/train-*"}, {"split": "test", "path": "dialogsum/test-*"}]}, {"config_name": "glue-mrpc", "data_files": [{"split": "train", "path": "glue-mrpc/train-*"}, {"split": "test", "path": "glue-mrpc/test-*"}]}, {"config_name": "ropes", "data_files": [{"split": "train", "path": "ropes/train-*"}, {"split": "test", "path": "ropes/test-*"}]}, {"config_name": "winograd_wsc", "data_files": [{"split": "train", "path": "winograd_wsc/train-*"}, {"split": "test", "path": "winograd_wsc/test-*"}]}]} | 2024-02-17T04:22:16+00:00 |
|
5b2e52780a3cb1129d7ae1b7891739dd06173eed | robbo232323/gutenberg-block-from-next | [
"task_categories:text-generation",
"size_categories:n<1K",
"language:pl",
"region:us"
] | 2024-02-17T03:22:43+00:00 | {"language": ["pl"], "size_categories": ["n<1K"], "task_categories": ["text-generation"], "pretty_name": "Gutenberg Blocks from Next.js"} | 2024-02-17T03:23:41+00:00 |
|
0153e4ae75daf978d724850ef75ba16ac6702c71 | AIGym/all-recipes | [
"region:us"
] | 2024-02-17T03:23:22+00:00 | {"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1569011376, "num_examples": 2147248}], "download_size": 807143413, "dataset_size": 1569011376}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T03:23:52+00:00 |
|
b8a523e87d62ae0f818c23620fde5fcb152f53bf | AIGym/custom-generated | [
"region:us"
] | 2024-02-17T03:31:21+00:00 | {"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 38995, "num_examples": 293}], "download_size": 23868, "dataset_size": 38995}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T03:31:22+00:00 |
|
0440124bf59a5e44cfa56b54d19a7d413417f59d | AIGym/custom-generated2 | [
"region:us"
] | 2024-02-17T03:31:29+00:00 | {"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 56106, "num_examples": 429}], "download_size": 30436, "dataset_size": 56106}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T03:31:30+00:00 |
|
cb4a1b8998dcc55009217c729b9207b346126be1 | wentingzhao/knn-prompt-datastore | [
"region:us"
] | 2024-02-17T03:37:16+00:00 | {"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 2360312955, "num_examples": 2934591}], "download_size": 1352870614, "dataset_size": 2360312955}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T03:38:06+00:00 |
|
283f36a618e1238123cd29bfbba6c0d876281053 | 0x7o/c4-ru-cleaned | [
"region:us"
] | 2024-02-17T03:41:38+00:00 | {"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 249106481, "num_examples": 32711}], "download_size": 118213404, "dataset_size": 249106481}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T03:42:35+00:00 |
|
6c3308c4202509ce7d234c8958a0819a6b07449d | Literally just https://huggingface.co/datasets/teknium/OpenHermes-2.5 but converted to be usable in MLX LoRA training (assumes ChatML format) | N8Programs/openhermes-2.5-mlx | [
"region:us"
] | 2024-02-17T03:48:00+00:00 | {} | 2024-02-17T04:06:54+00:00 |
47c12997550f2885ba0b358f7910e55ba9e9e866 | GGital/Signal_Test05 | [
"region:us"
] | 2024-02-17T03:49:35+00:00 | {"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "0", "1": "1", "2": "2", "3": "3", "4": "4", "5": "5", "6": "6"}}}}], "splits": [{"name": "train", "num_bytes": 145648334.599, "num_examples": 4207}], "download_size": 150475009, "dataset_size": 145648334.599}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T03:50:01+00:00 |
|
e73989ed2fa96e4cdaccab1c830b5ce70d04c022 |
# Dataset Card for Evaluation run of DreadPoor/WhyAreWeStillHere-7B-slerp
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [DreadPoor/WhyAreWeStillHere-7B-slerp](https://huggingface.co/DreadPoor/WhyAreWeStillHere-7B-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_DreadPoor__WhyAreWeStillHere-7B-slerp",
"harness_winogrande_5",
split="train")
```
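The aggregated metrics are exposed through the separate "results" configuration mentioned above; here is a minimal sketch (same `datasets` API as the example above, with the "latest" split assumed to point at the most recent run) for pulling them:
```python
from datasets import load_dataset

# "results" holds the aggregated metrics of each run; "latest" tracks the newest one.
results = load_dataset(
    "open-llm-leaderboard/details_DreadPoor__WhyAreWeStillHere-7B-slerp",
    "results",
    split="latest",
)
print(results[0])
```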
## Latest results
These are the [latest results from run 2024-02-17T03:56:00.783844](https://huggingface.co/datasets/open-llm-leaderboard/details_DreadPoor__WhyAreWeStillHere-7B-slerp/blob/main/results_2024-02-17T03-56-00.783844.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6545570581109811,
"acc_stderr": 0.032029886164546564,
"acc_norm": 0.6542595014266129,
"acc_norm_stderr": 0.03270002296417162,
"mc1": 0.5385556915544676,
"mc1_stderr": 0.017451384104637452,
"mc2": 0.6812168345875693,
"mc2_stderr": 0.015141577387322332
},
"harness|arc:challenge|25": {
"acc": 0.6988054607508533,
"acc_stderr": 0.013406741767847632,
"acc_norm": 0.7167235494880546,
"acc_norm_stderr": 0.013167478735134575
},
"harness|hellaswag|10": {
"acc": 0.7187811192989444,
"acc_stderr": 0.004486752200430352,
"acc_norm": 0.8824935271858195,
"acc_norm_stderr": 0.0032136470410029467
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6518518518518519,
"acc_stderr": 0.041153246103369526,
"acc_norm": 0.6518518518518519,
"acc_norm_stderr": 0.041153246103369526
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.7236842105263158,
"acc_stderr": 0.03639057569952928,
"acc_norm": 0.7236842105263158,
"acc_norm_stderr": 0.03639057569952928
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.62,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.62,
"acc_norm_stderr": 0.048783173121456316
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.7094339622641509,
"acc_stderr": 0.02794321998933714,
"acc_norm": 0.7094339622641509,
"acc_norm_stderr": 0.02794321998933714
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7638888888888888,
"acc_stderr": 0.03551446610810826,
"acc_norm": 0.7638888888888888,
"acc_norm_stderr": 0.03551446610810826
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620333,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620333
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.53,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.53,
"acc_norm_stderr": 0.050161355804659205
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6416184971098265,
"acc_stderr": 0.03656343653353159,
"acc_norm": 0.6416184971098265,
"acc_norm_stderr": 0.03656343653353159
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.38235294117647056,
"acc_stderr": 0.04835503696107223,
"acc_norm": 0.38235294117647056,
"acc_norm_stderr": 0.04835503696107223
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.75,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.75,
"acc_norm_stderr": 0.04351941398892446
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5829787234042553,
"acc_stderr": 0.03223276266711712,
"acc_norm": 0.5829787234042553,
"acc_norm_stderr": 0.03223276266711712
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.4824561403508772,
"acc_stderr": 0.04700708033551038,
"acc_norm": 0.4824561403508772,
"acc_norm_stderr": 0.04700708033551038
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5655172413793104,
"acc_stderr": 0.04130740879555498,
"acc_norm": 0.5655172413793104,
"acc_norm_stderr": 0.04130740879555498
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.4312169312169312,
"acc_stderr": 0.025506481698138208,
"acc_norm": 0.4312169312169312,
"acc_norm_stderr": 0.025506481698138208
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.4603174603174603,
"acc_stderr": 0.04458029125470973,
"acc_norm": 0.4603174603174603,
"acc_norm_stderr": 0.04458029125470973
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7903225806451613,
"acc_stderr": 0.02315787934908352,
"acc_norm": 0.7903225806451613,
"acc_norm_stderr": 0.02315787934908352
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.4975369458128079,
"acc_stderr": 0.03517945038691063,
"acc_norm": 0.4975369458128079,
"acc_norm_stderr": 0.03517945038691063
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.69,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.69,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7818181818181819,
"acc_stderr": 0.03225078108306289,
"acc_norm": 0.7818181818181819,
"acc_norm_stderr": 0.03225078108306289
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.8080808080808081,
"acc_stderr": 0.028057791672989017,
"acc_norm": 0.8080808080808081,
"acc_norm_stderr": 0.028057791672989017
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.9015544041450777,
"acc_stderr": 0.021500249576033484,
"acc_norm": 0.9015544041450777,
"acc_norm_stderr": 0.021500249576033484
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6641025641025641,
"acc_stderr": 0.023946724741563976,
"acc_norm": 0.6641025641025641,
"acc_norm_stderr": 0.023946724741563976
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.32222222222222224,
"acc_stderr": 0.028493465091028593,
"acc_norm": 0.32222222222222224,
"acc_norm_stderr": 0.028493465091028593
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6848739495798319,
"acc_stderr": 0.030176808288974337,
"acc_norm": 0.6848739495798319,
"acc_norm_stderr": 0.030176808288974337
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.4105960264900662,
"acc_stderr": 0.04016689594849929,
"acc_norm": 0.4105960264900662,
"acc_norm_stderr": 0.04016689594849929
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8458715596330275,
"acc_stderr": 0.015480826865374307,
"acc_norm": 0.8458715596330275,
"acc_norm_stderr": 0.015480826865374307
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5092592592592593,
"acc_stderr": 0.034093869469927006,
"acc_norm": 0.5092592592592593,
"acc_norm_stderr": 0.034093869469927006
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8529411764705882,
"acc_stderr": 0.024857478080250458,
"acc_norm": 0.8529411764705882,
"acc_norm_stderr": 0.024857478080250458
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.8185654008438819,
"acc_stderr": 0.025085961144579654,
"acc_norm": 0.8185654008438819,
"acc_norm_stderr": 0.025085961144579654
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.695067264573991,
"acc_stderr": 0.030898610882477515,
"acc_norm": 0.695067264573991,
"acc_norm_stderr": 0.030898610882477515
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7938931297709924,
"acc_stderr": 0.03547771004159465,
"acc_norm": 0.7938931297709924,
"acc_norm_stderr": 0.03547771004159465
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7851239669421488,
"acc_stderr": 0.037494924487096966,
"acc_norm": 0.7851239669421488,
"acc_norm_stderr": 0.037494924487096966
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.8055555555555556,
"acc_stderr": 0.038260763248848646,
"acc_norm": 0.8055555555555556,
"acc_norm_stderr": 0.038260763248848646
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7668711656441718,
"acc_stderr": 0.03322015795776741,
"acc_norm": 0.7668711656441718,
"acc_norm_stderr": 0.03322015795776741
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.4375,
"acc_stderr": 0.04708567521880525,
"acc_norm": 0.4375,
"acc_norm_stderr": 0.04708567521880525
},
"harness|hendrycksTest-management|5": {
"acc": 0.7572815533980582,
"acc_stderr": 0.04245022486384495,
"acc_norm": 0.7572815533980582,
"acc_norm_stderr": 0.04245022486384495
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8846153846153846,
"acc_stderr": 0.02093019318517933,
"acc_norm": 0.8846153846153846,
"acc_norm_stderr": 0.02093019318517933
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.72,
"acc_stderr": 0.045126085985421276,
"acc_norm": 0.72,
"acc_norm_stderr": 0.045126085985421276
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8275862068965517,
"acc_stderr": 0.013507943909371802,
"acc_norm": 0.8275862068965517,
"acc_norm_stderr": 0.013507943909371802
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7543352601156069,
"acc_stderr": 0.023176298203992002,
"acc_norm": 0.7543352601156069,
"acc_norm_stderr": 0.023176298203992002
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.46033519553072627,
"acc_stderr": 0.01666979959211203,
"acc_norm": 0.46033519553072627,
"acc_norm_stderr": 0.01666979959211203
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7254901960784313,
"acc_stderr": 0.025553169991826524,
"acc_norm": 0.7254901960784313,
"acc_norm_stderr": 0.025553169991826524
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.707395498392283,
"acc_stderr": 0.02583989833487798,
"acc_norm": 0.707395498392283,
"acc_norm_stderr": 0.02583989833487798
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7407407407407407,
"acc_stderr": 0.02438366553103545,
"acc_norm": 0.7407407407407407,
"acc_norm_stderr": 0.02438366553103545
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.5070921985815603,
"acc_stderr": 0.02982449855912901,
"acc_norm": 0.5070921985815603,
"acc_norm_stderr": 0.02982449855912901
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4654498044328553,
"acc_stderr": 0.0127397115540457,
"acc_norm": 0.4654498044328553,
"acc_norm_stderr": 0.0127397115540457
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6911764705882353,
"acc_stderr": 0.02806499816704009,
"acc_norm": 0.6911764705882353,
"acc_norm_stderr": 0.02806499816704009
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6683006535947712,
"acc_stderr": 0.01904748523936038,
"acc_norm": 0.6683006535947712,
"acc_norm_stderr": 0.01904748523936038
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6727272727272727,
"acc_stderr": 0.0449429086625209,
"acc_norm": 0.6727272727272727,
"acc_norm_stderr": 0.0449429086625209
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7224489795918367,
"acc_stderr": 0.02866685779027465,
"acc_norm": 0.7224489795918367,
"acc_norm_stderr": 0.02866685779027465
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.845771144278607,
"acc_stderr": 0.025538433368578337,
"acc_norm": 0.845771144278607,
"acc_norm_stderr": 0.025538433368578337
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.84,
"acc_stderr": 0.03684529491774708,
"acc_norm": 0.84,
"acc_norm_stderr": 0.03684529491774708
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5421686746987951,
"acc_stderr": 0.038786267710023595,
"acc_norm": 0.5421686746987951,
"acc_norm_stderr": 0.038786267710023595
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8421052631578947,
"acc_stderr": 0.027966785859160893,
"acc_norm": 0.8421052631578947,
"acc_norm_stderr": 0.027966785859160893
},
"harness|truthfulqa:mc|0": {
"mc1": 0.5385556915544676,
"mc1_stderr": 0.017451384104637452,
"mc2": 0.6812168345875693,
"mc2_stderr": 0.015141577387322332
},
"harness|winogrande|5": {
"acc": 0.8547750591949487,
"acc_stderr": 0.009902153904760824
},
"harness|gsm8k|5": {
"acc": 0.6535253980288097,
"acc_stderr": 0.013107179054313403
}
}
```
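For a quick look at the same numbers outside of `datasets`, the JSON file linked above can be fetched directly; a small sketch, assuming that swapping `/blob/` for `/resolve/` in the link serves the raw file:
```python
import json
import urllib.request

# Assumption: replacing /blob/ with /resolve/ in the card's link returns the raw JSON.
url = ("https://huggingface.co/datasets/open-llm-leaderboard/"
       "details_DreadPoor__WhyAreWeStillHere-7B-slerp/resolve/main/"
       "results_2024-02-17T03-56-00.783844.json")

with urllib.request.urlopen(url) as resp:
    data = json.load(resp)

# The exact top-level layout may differ from the snippet above, so list the keys
# first and drill down to the "all" block from there.
print(list(data.keys()))
```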
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | open-llm-leaderboard/details_DreadPoor__WhyAreWeStillHere-7B-slerp | [
"region:us"
] | 2024-02-17T03:58:18+00:00 | {"pretty_name": "Evaluation run of DreadPoor/WhyAreWeStillHere-7B-slerp", "dataset_summary": "Dataset automatically created during the evaluation run of model [DreadPoor/WhyAreWeStillHere-7B-slerp](https://huggingface.co/DreadPoor/WhyAreWeStillHere-7B-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_DreadPoor__WhyAreWeStillHere-7B-slerp\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-17T03:56:00.783844](https://huggingface.co/datasets/open-llm-leaderboard/details_DreadPoor__WhyAreWeStillHere-7B-slerp/blob/main/results_2024-02-17T03-56-00.783844.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6545570581109811,\n \"acc_stderr\": 0.032029886164546564,\n \"acc_norm\": 0.6542595014266129,\n \"acc_norm_stderr\": 0.03270002296417162,\n \"mc1\": 0.5385556915544676,\n \"mc1_stderr\": 0.017451384104637452,\n \"mc2\": 0.6812168345875693,\n \"mc2_stderr\": 0.015141577387322332\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6988054607508533,\n \"acc_stderr\": 0.013406741767847632,\n \"acc_norm\": 0.7167235494880546,\n \"acc_norm_stderr\": 0.013167478735134575\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7187811192989444,\n \"acc_stderr\": 0.004486752200430352,\n \"acc_norm\": 0.8824935271858195,\n \"acc_norm_stderr\": 0.0032136470410029467\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6518518518518519,\n \"acc_stderr\": 0.041153246103369526,\n \"acc_norm\": 0.6518518518518519,\n \"acc_norm_stderr\": 0.041153246103369526\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7236842105263158,\n \"acc_stderr\": 0.03639057569952928,\n \"acc_norm\": 0.7236842105263158,\n \"acc_norm_stderr\": 0.03639057569952928\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7094339622641509,\n \"acc_stderr\": 0.02794321998933714,\n \"acc_norm\": 0.7094339622641509,\n \"acc_norm_stderr\": 0.02794321998933714\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 0.03551446610810826\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6416184971098265,\n \"acc_stderr\": 0.03656343653353159,\n \"acc_norm\": 0.6416184971098265,\n \"acc_norm_stderr\": 0.03656343653353159\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5829787234042553,\n \"acc_stderr\": 0.03223276266711712,\n \"acc_norm\": 0.5829787234042553,\n \"acc_norm_stderr\": 0.03223276266711712\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.4824561403508772,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5655172413793104,\n \"acc_stderr\": 0.04130740879555498,\n \"acc_norm\": 0.5655172413793104,\n \"acc_norm_stderr\": 0.04130740879555498\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4312169312169312,\n \"acc_stderr\": 0.025506481698138208,\n \"acc_norm\": 0.4312169312169312,\n \"acc_norm_stderr\": 0.025506481698138208\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7903225806451613,\n \"acc_stderr\": 0.02315787934908352,\n \"acc_norm\": 0.7903225806451613,\n \"acc_norm_stderr\": 0.02315787934908352\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4975369458128079,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.4975369458128079,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8080808080808081,\n \"acc_stderr\": 0.028057791672989017,\n \"acc_norm\": 0.8080808080808081,\n \"acc_norm_stderr\": 0.028057791672989017\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.021500249576033484,\n \"acc_norm\": 0.9015544041450777,\n 
\"acc_norm_stderr\": 0.021500249576033484\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6641025641025641,\n \"acc_stderr\": 0.023946724741563976,\n \"acc_norm\": 0.6641025641025641,\n \"acc_norm_stderr\": 0.023946724741563976\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.32222222222222224,\n \"acc_stderr\": 0.028493465091028593,\n \"acc_norm\": 0.32222222222222224,\n \"acc_norm_stderr\": 0.028493465091028593\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6848739495798319,\n \"acc_stderr\": 0.030176808288974337,\n \"acc_norm\": 0.6848739495798319,\n \"acc_norm_stderr\": 0.030176808288974337\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.4105960264900662,\n \"acc_stderr\": 0.04016689594849929,\n \"acc_norm\": 0.4105960264900662,\n \"acc_norm_stderr\": 0.04016689594849929\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8458715596330275,\n \"acc_stderr\": 0.015480826865374307,\n \"acc_norm\": 0.8458715596330275,\n \"acc_norm_stderr\": 0.015480826865374307\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5092592592592593,\n \"acc_stderr\": 0.034093869469927006,\n \"acc_norm\": 0.5092592592592593,\n \"acc_norm_stderr\": 0.034093869469927006\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8529411764705882,\n \"acc_stderr\": 0.024857478080250458,\n \"acc_norm\": 0.8529411764705882,\n \"acc_norm_stderr\": 0.024857478080250458\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8185654008438819,\n \"acc_stderr\": 0.025085961144579654,\n \"acc_norm\": 0.8185654008438819,\n \"acc_norm_stderr\": 0.025085961144579654\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7938931297709924,\n \"acc_stderr\": 0.03547771004159465,\n \"acc_norm\": 0.7938931297709924,\n \"acc_norm_stderr\": 0.03547771004159465\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7668711656441718,\n \"acc_stderr\": 0.03322015795776741,\n \"acc_norm\": 0.7668711656441718,\n \"acc_norm_stderr\": 0.03322015795776741\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4375,\n \"acc_stderr\": 0.04708567521880525,\n \"acc_norm\": 0.4375,\n \"acc_norm_stderr\": 0.04708567521880525\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7572815533980582,\n \"acc_stderr\": 0.04245022486384495,\n \"acc_norm\": 0.7572815533980582,\n \"acc_norm_stderr\": 0.04245022486384495\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8846153846153846,\n \"acc_stderr\": 0.02093019318517933,\n \"acc_norm\": 0.8846153846153846,\n \"acc_norm_stderr\": 0.02093019318517933\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8275862068965517,\n \"acc_stderr\": 0.013507943909371802,\n \"acc_norm\": 0.8275862068965517,\n \"acc_norm_stderr\": 0.013507943909371802\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7543352601156069,\n \"acc_stderr\": 0.023176298203992002,\n \"acc_norm\": 0.7543352601156069,\n \"acc_norm_stderr\": 0.023176298203992002\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.46033519553072627,\n \"acc_stderr\": 0.01666979959211203,\n \"acc_norm\": 0.46033519553072627,\n \"acc_norm_stderr\": 0.01666979959211203\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7254901960784313,\n \"acc_stderr\": 0.025553169991826524,\n \"acc_norm\": 0.7254901960784313,\n \"acc_norm_stderr\": 0.025553169991826524\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.707395498392283,\n \"acc_stderr\": 0.02583989833487798,\n \"acc_norm\": 0.707395498392283,\n \"acc_norm_stderr\": 0.02583989833487798\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.02438366553103545,\n \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.02438366553103545\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5070921985815603,\n \"acc_stderr\": 0.02982449855912901,\n \"acc_norm\": 0.5070921985815603,\n \"acc_norm_stderr\": 0.02982449855912901\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4654498044328553,\n \"acc_stderr\": 0.0127397115540457,\n \"acc_norm\": 0.4654498044328553,\n \"acc_norm_stderr\": 0.0127397115540457\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6911764705882353,\n \"acc_stderr\": 0.02806499816704009,\n \"acc_norm\": 0.6911764705882353,\n \"acc_norm_stderr\": 0.02806499816704009\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6683006535947712,\n \"acc_stderr\": 0.01904748523936038,\n \"acc_norm\": 0.6683006535947712,\n \"acc_norm_stderr\": 0.01904748523936038\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.02866685779027465,\n \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.02866685779027465\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.845771144278607,\n \"acc_stderr\": 0.025538433368578337,\n \"acc_norm\": 0.845771144278607,\n \"acc_norm_stderr\": 0.025538433368578337\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774708,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774708\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5421686746987951,\n \"acc_stderr\": 0.038786267710023595,\n \"acc_norm\": 0.5421686746987951,\n \"acc_norm_stderr\": 0.038786267710023595\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8421052631578947,\n \"acc_stderr\": 0.027966785859160893,\n \"acc_norm\": 0.8421052631578947,\n \"acc_norm_stderr\": 0.027966785859160893\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5385556915544676,\n \"mc1_stderr\": 0.017451384104637452,\n \"mc2\": 0.6812168345875693,\n \"mc2_stderr\": 0.015141577387322332\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8547750591949487,\n \"acc_stderr\": 0.009902153904760824\n },\n \"harness|gsm8k|5\": {\n \"acc\": 
0.6535253980288097,\n \"acc_stderr\": 0.013107179054313403\n }\n}\n```", "repo_url": "https://huggingface.co/DreadPoor/WhyAreWeStillHere-7B-slerp", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "clementine@hf.co", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|arc:challenge|25_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|gsm8k|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hellaswag|10_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T03-56-00.783844.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T03-56-00.783844.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-17T03-56-00.783844.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-17T03-56-00.783844.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T03-56-00.783844.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_17T03_56_00.783844", "path": ["**/details_harness|winogrande|5_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-17T03-56-00.783844.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_17T03_56_00.783844", "path": ["results_2024-02-17T03-56-00.783844.parquet"]}, {"split": "latest", "path": ["results_2024-02-17T03-56-00.783844.parquet"]}]}]} | 2024-02-17T03:58:39+00:00 |
2b1d56470db24b24249fc821108753c63d8ac766 | manishiitg/Open-Orca-SlimOrca-Dedup | [
"region:us"
] | 2024-02-17T03:59:24+00:00 | {"dataset_info": {"features": [{"name": "system", "dtype": "string"}, {"name": "instruction", "dtype": "string"}, {"name": "response", "dtype": "string"}, {"name": "lang", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1794794630, "num_examples": 646394}], "download_size": 776232300, "dataset_size": 1794794630}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T04:16:41+00:00 |
|
a6193f8d72abb2df4d5ea06b21e7bee1eba39776 | manishiitg/boolq | [
"region:us"
] | 2024-02-17T04:01:31+00:00 | {"dataset_info": {"features": [{"name": "system", "dtype": "string"}, {"name": "instruction", "dtype": "string"}, {"name": "response", "dtype": "string"}, {"name": "lang", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 26735824, "num_examples": 18844}], "download_size": 10362661, "dataset_size": 26735824}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T04:17:20+00:00 |
|
de415925d4729d9f8da8f461a5c49c655eda32e7 |
Retrieval training data with hard negative examples. About 200K samples.
File format: jsonl. Example of a single line:
```
{"Query": "大熊猫的饮食习性", "Positive Document": "大熊猫主要以竹子为食,但也会吃水果和小型动物。它们拥有强壮的颌部和牙齿,能够咬碎竹子坚硬的外壳。", "Hard Negative Document": "老虎是肉食性动物,主要捕食鹿、野猪等大型动物。它们的牙齿和爪子非常锋利,是捕猎的利器。"}
``` | infgrad/retrieval_data_llm | [
"size_categories:100K<n<1M",
"language:zh",
"license:mit",
"region:us"
] | 2024-02-17T04:13:09+00:00 | {"language": ["zh"], "license": "mit", "size_categories": ["100K<n<1M"]} | 2024-02-17T04:16:25+00:00 |
0b6e44e551efef5fee9d7b78bf0ae5224a470441 | manishiitg/cais-mmlu-train | [
"region:us"
] | 2024-02-17T04:17:34+00:00 | {"dataset_info": {"features": [{"name": "system", "dtype": "string"}, {"name": "instruction", "dtype": "string"}, {"name": "response", "dtype": "string"}, {"name": "lang", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 626910367, "num_examples": 199684}], "download_size": 142441315, "dataset_size": 626910367}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T04:17:47+00:00 |
|
9b82df3ec41551933829bbb219f17dcee26302f1 | manishiitg/squad_v2 | [
"region:us"
] | 2024-02-17T04:18:05+00:00 | {"dataset_info": {"features": [{"name": "system", "dtype": "string"}, {"name": "instruction", "dtype": "string"}, {"name": "response", "dtype": "string"}, {"name": "lang", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 314202365, "num_examples": 173518}], "download_size": 53117449, "dataset_size": 314202365}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T04:18:08+00:00 |
|
440b63b92d96c99c010427d549cfdd4b44094d43 | manishiitg/camel-ai-physics | [
"region:us"
] | 2024-02-17T04:18:32+00:00 | {"dataset_info": {"features": [{"name": "system", "dtype": "string"}, {"name": "instruction", "dtype": "string"}, {"name": "response", "dtype": "string"}, {"name": "lang", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 173711856, "num_examples": 40000}], "download_size": 57766434, "dataset_size": 173711856}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T04:18:35+00:00 |
|
a37b808efd5061aab02cb08142c86ca39d54600d | manishiitg/camel-ai-chemistry | [
"region:us"
] | 2024-02-17T04:19:03+00:00 | {"dataset_info": {"features": [{"name": "system", "dtype": "string"}, {"name": "instruction", "dtype": "string"}, {"name": "response", "dtype": "string"}, {"name": "lang", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 159637053, "num_examples": 40000}], "download_size": 52279570, "dataset_size": 159637053}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T04:19:07+00:00 |
|
d7acd6729683e7d3070563c2d54f2b272ff6d816 | manishiitg/camel-ai-biology | [
"region:us"
] | 2024-02-17T04:19:36+00:00 | {"dataset_info": {"features": [{"name": "system", "dtype": "string"}, {"name": "instruction", "dtype": "string"}, {"name": "response", "dtype": "string"}, {"name": "lang", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 216893510, "num_examples": 40000}], "download_size": 71961972, "dataset_size": 216893510}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T04:19:40+00:00 |
|
70a051f276021fd2f12360a30950120d30aeacd5 | manishiitg/manishiitg-CogStack-Tasks | [
"region:us"
] | 2024-02-17T04:19:45+00:00 | {"dataset_info": {"features": [{"name": "system", "dtype": "string"}, {"name": "instruction", "dtype": "string"}, {"name": "response", "dtype": "string"}, {"name": "lang", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 16823435, "num_examples": 9378}], "download_size": 7536397, "dataset_size": 16823435}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T04:20:00+00:00 |
|
704dc5de089566d6af718473aeafa4a8ce7fa563 | manishiitg/manishiitg-CogStack-QA | [
"region:us"
] | 2024-02-17T04:19:54+00:00 | {"dataset_info": {"features": [{"name": "system", "dtype": "string"}, {"name": "instruction", "dtype": "string"}, {"name": "response", "dtype": "string"}, {"name": "lang", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 30586396, "num_examples": 49330}], "download_size": 11513745, "dataset_size": 30586396}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T04:19:56+00:00 |
|
37ab9243387fe08f5eac62d9b4fc0b0a85970579 | manishiitg/truthful_qa | [
"region:us"
] | 2024-02-17T04:26:31+00:00 | {"dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "mc1_targets", "struct": [{"name": "choices", "sequence": "string"}]}, {"name": "mc2_targets", "struct": [{"name": "choices", "sequence": "string"}, {"name": "labels", "sequence": "int64"}]}], "splits": [{"name": "validation", "num_bytes": 59921, "num_examples": 66}], "download_size": 25637, "dataset_size": 59921}, "configs": [{"config_name": "default", "data_files": [{"split": "validation", "path": "data/validation-*"}]}]} | 2024-02-17T04:26:32+00:00 |
|
3144d94cb4bb79fa0a2fc7349de5070d82236e97 | acoa/guanaco-llama2-1k | [
"region:us"
] | 2024-02-17T04:27:39+00:00 | {"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1654448, "num_examples": 1000}], "download_size": 966692, "dataset_size": 1654448}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T04:27:41+00:00 |
|
d1e0b5337768606076e7a31a5ae2aa14136df3e5 | benayas/massive_augmented_5pct_v0 | [
"region:us"
] | 2024-02-17T04:28:46+00:00 | {"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "locale", "dtype": "string"}, {"name": "partition", "dtype": "string"}, {"name": "scenario", "dtype": "float64"}, {"name": "intent", "dtype": "float64"}, {"name": "utt", "dtype": "string"}, {"name": "annot_utt", "dtype": "string"}, {"name": "worker_id", "dtype": "string"}, {"name": "slot_method", "struct": [{"name": "method", "sequence": "null"}, {"name": "slot", "sequence": "null"}]}, {"name": "judgments", "struct": [{"name": "grammar_score", "sequence": "int8"}, {"name": "intent_score", "sequence": "int8"}, {"name": "language_identification", "sequence": "null"}, {"name": "slots_score", "sequence": "int8"}, {"name": "spelling_score", "sequence": "int8"}, {"name": "worker_id", "sequence": "null"}]}, {"name": "category", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "__index_level_0__", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 1670233, "num_examples": 11514}], "download_size": 401934, "dataset_size": 1670233}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T04:28:48+00:00 |
|
24a917147cd8a20e4702b782e3c934c9780ccb52 | # Dataset Card for enron_spam_small
<!-- Provide a quick summary of the dataset. -->
This dataset is a condensed subset derived from the SetFit/enron_spam dataset. It comprises two primary columns: 'Text' and 'Label'. The dataset contains 1000 samples for training and 1000 samples for testing, making it suitable for binary text classification tasks.
## Dataset Details
### Columns
- **Text:** Represents the content of an email.
- **Label:** Indicates whether the email is categorized as 'spam' or 'ham' (non-spam).
### Dataset Sources
- **Repository:** https://huggingface.co/datasets/SetFit/enron_spam
## Uses
### Direct Use
```python
from datasets import load_dataset
spam_dataset = load_dataset("likhith231/enron_spam_small")
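# minimal usage sketch: each row exposes the 'text' and 'label' features
# listed in the dataset metadata
example = spam_dataset["train"][0]
print(example["text"], example["label"])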
``` | likhith231/enron_spam_small | [
"task_categories:text-classification",
"region:us"
] | 2024-02-17T04:36:55+00:00 | {"task_categories": ["text-classification"], "dataset_info": {"features": [{"name": "label", "dtype": "int64"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1452847, "num_examples": 1000}, {"name": "validation", "num_bytes": 1685310, "num_examples": 1000}], "download_size": 1637839, "dataset_size": 3138157}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}]}]} | 2024-02-17T04:57:46+00:00 |
ad2164b8d42095891cdfde3612a7711c6a9f3b68 | KentoTsu/kekento | [
"license:openrail",
"region:us"
] | 2024-02-17T04:40:32+00:00 | {"license": "openrail"} | 2024-02-17T04:40:49+00:00 |
|
8be01f2fa84ed28a11c410e31a7fffdb6626563b | GGital/Signal_Test06 | [
"region:us"
] | 2024-02-17T04:47:55+00:00 | {"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "0", "1": "1", "2": "2", "3": "3", "4": "4", "5": "5", "6": "6"}}}}], "splits": [{"name": "train", "num_bytes": 286694206.51, "num_examples": 4207}], "download_size": 269451547, "dataset_size": 286694206.51}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T04:48:18+00:00 |
|
fb658011929aca1d96215d6a9a7a682ca90ac930 | Ziggy1/Storm | [
"region:us"
] | 2024-02-17T04:48:19+00:00 | {} | 2024-02-17T04:48:46+00:00 |
|
e1cbd213aa33236546b0098de9b5501e32a1c875 | priyald/tem2_data | [
"license:apache-2.0",
"region:us"
] | 2024-02-17T04:51:04+00:00 | {"license": "apache-2.0"} | 2024-02-17T04:51:41+00:00 |
|
25c9c011322a51e4794b4eff5590cf011d64463e | AIGym/imdb | [
"region:us"
] | 2024-02-17T04:56:39+00:00 | {"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 33232823, "num_examples": 25000}], "download_size": 20972003, "dataset_size": 33232823}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T04:56:43+00:00 |
|
e9b84feb3c0e3cda39ba691fd8bdaeaa2b25d913 | biglab/jitteredwebsites-2.1-224 | [
"region:us"
] | 2024-02-17T04:57:06+00:00 | {"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 2867913957.495, "num_examples": 325481}, {"name": "validation", "num_bytes": 373776688.105, "num_examples": 41593}, {"name": "test", "num_bytes": 386615918.936, "num_examples": 41482}], "download_size": 3712040764, "dataset_size": 3628306564.536}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2024-02-17T06:29:22+00:00 |
|
31c047b5b996f4ddaa406eb9e246c294f3514fdd | AIGym/yelp | [
"region:us"
] | 2024-02-17T04:58:02+00:00 | {"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 478611554, "num_examples": 650000}], "download_size": 298976195, "dataset_size": 478611554}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T04:58:15+00:00 |
|
fd28920c5e5f86ff6257b7e4f46f0b0e694dacb5 | AIGym/HotelReviews | [
"region:us"
] | 2024-02-17T04:58:33+00:00 | {"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 99237912, "num_examples": 515738}], "download_size": 55704545, "dataset_size": 99237912}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T04:58:36+00:00 |
|
a50e3e8f18abe2f8e2d89c3b19ae281066e6cb95 |
Total train samples: 26381
Total test samples: 9868
Total tasks: 8
| Task | Train | Test |
| ---- | ----- | ---- |
|reference_number_association_without_question_boxes/2024-02-12|3026|633|
|reference_numbers/2024-02-12|3058|678|
|reference_number_association_with_question_boxes/2024-02-12|3032|633|
|table_cell_incremental_without_question_boxes/2024-02-12|3438|1481|
|table_cell_incremental_with_question_boxes/2024-02-12|4053|1422|
|table_header_with_question_boxes/2024-02-12|3714|3731|
|key_value/2024-02-12|3020|682|
|label_and_location/2024-02-12|3040|608|
Total artifact_qids: 5764
| looppayments/question_answering_token_classification_2024_02_01 | [
"region:us"
] | 2024-02-17T04:59:58+00:00 | {"pretty_name": "Question Answering Token Classification"} | 2024-02-17T05:32:45+00:00 |
476447b5d832b5a694bdbf34c20f7b42f6869e78 | AIGym/ade-corpus | [
"region:us"
] | 2024-02-17T05:00:36+00:00 | {"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 3215571, "num_examples": 23516}], "download_size": 1698920, "dataset_size": 3215571}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T05:00:37+00:00 |
|
2913d79b8f6d86fcdb0e8fc423f1287e1ed4280e | PhilSad/Alpaca_french_instruct_sft | [
"region:us"
] | 2024-02-17T05:10:26+00:00 | {"dataset_info": {"features": [{"name": "__index_level_0__", "dtype": "int64"}, {"name": "prompt", "dtype": "string"}, {"name": "completion", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 32501020.68812738, "num_examples": 49401}, {"name": "test", "num_bytes": 1711203.3118726204, "num_examples": 2601}], "download_size": 15399057, "dataset_size": 34212224.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]} | 2024-02-17T05:11:55+00:00 |
|
d0740b76184a8b43111ac5f22993de0e734f78af | AIGym/small-pile | [
"region:us"
] | 2024-02-17T05:11:34+00:00 | {"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 602060516, "num_examples": 100000}], "download_size": 318168347, "dataset_size": 602060516}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T05:11:50+00:00 |
|
0c69adf81479f425e03dd69a5d6b4081c248de47 | AIGym/news | [
"region:us"
] | 2024-02-17T05:15:17+00:00 | {"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 13828692, "num_examples": 11314}], "download_size": 8908140, "dataset_size": 13828692}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T05:15:19+00:00 |
|
d5fa8aba9975e206e80b0e2af1d60db73dbfaa7a | hyperdemocracy/tmp | [
"region:us"
] | 2024-02-17T05:22:56+00:00 | {} | 2024-02-17T16:07:59+00:00 |
|
03f63fdad1b98d17c27cb5b1514c825058c829ce | NatarajanRayi/animal-detection | [
"license:openrail",
"region:us"
] | 2024-02-17T05:31:19+00:00 | {"license": "openrail"} | 2024-02-17T05:31:19+00:00 |
|
b3f48fc4558651e3df0415d1aedf4a06e3da7983 | AIGym/cc_news | [
"region:us"
] | 2024-02-17T05:35:42+00:00 | {"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1707241603, "num_examples": 708241}], "download_size": 970495297, "dataset_size": 1707241603}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T05:36:20+00:00 |
|
82cd2b51025bb13bfc3db04dfa099dbbc2632d1f | AIGym/ebook | [
"region:us"
] | 2024-02-17T05:36:40+00:00 | {"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 87851640, "num_examples": 646}], "download_size": 53266337, "dataset_size": 87851640}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T05:36:43+00:00 |
|
d32dc653120e025e4266a254d3a183a9ce89d528 | Atipico1/nq-test-adv_passage | [
"region:us"
] | 2024-02-17T05:45:08+00:00 | {"dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "entity", "dtype": "string"}, {"name": "similar_entity", "dtype": "string"}, {"name": "answers", "sequence": "string"}, {"name": "ctxs", "list": [{"name": "hasanswer", "dtype": "bool"}, {"name": "score", "dtype": "float64"}, {"name": "text", "dtype": "string"}, {"name": "title", "dtype": "string"}]}, {"name": "masked_query", "dtype": "string"}, {"name": "original_case", "list": [{"name": "answer", "dtype": "string"}, {"name": "context", "dtype": "string"}, {"name": "distance", "dtype": "string"}, {"name": "original_answers", "sequence": "string"}, {"name": "question", "dtype": "string"}]}, {"name": "unans_case", "list": [{"name": "answer", "dtype": "string"}, {"name": "answers", "sequence": "string"}, {"name": "context", "dtype": "string"}, {"name": "distance", "dtype": "string"}, {"name": "original_answers", "sequence": "string"}, {"name": "question", "dtype": "string"}]}, {"name": "conflict_case", "list": [{"name": "answer", "dtype": "string"}, {"name": "conflict_context", "dtype": "string"}, {"name": "context", "dtype": "string"}, {"name": "distance", "dtype": "string"}, {"name": "original_answers", "sequence": "string"}, {"name": "question", "dtype": "string"}]}, {"name": "context", "dtype": "string"}, {"name": "context_vague", "dtype": "string"}, {"name": "entities", "dtype": "string"}, {"name": "entities_count", "dtype": "int64"}, {"name": "adv_sent", "dtype": "string"}, {"name": "adv_passage", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 58397824, "num_examples": 3610}], "download_size": 34346195, "dataset_size": 58397824}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T05:45:18+00:00 |
|
12dd2615c68b80d77bd1f5e5073bc460d8c015d9 | LightFury9/tenglish_vocab | [
"region:us"
] | 2024-02-17T05:56:34+00:00 | {} | 2024-02-17T16:05:07+00:00 |
|
faf87157dcc4af9a03b0feaa31da854354c3c5f7 |
# Dataset Card for Evaluation run of giraffe176/Open_Maid_Samantha_Hermes_Orca_dare_ties
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [giraffe176/Open_Maid_Samantha_Hermes_Orca_dare_ties](https://huggingface.co/giraffe176/Open_Maid_Samantha_Hermes_Orca_dare_ties) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_giraffe176__Open_Maid_Samantha_Hermes_Orca_dare_ties",
"harness_winogrande_5",
split="train")
```
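The aggregated metrics are stored in the "results" configuration described above; as a minimal sketch, they can be loaded the same way by pointing at that configuration and its "latest" split:
```python
from datasets import load_dataset

# load the aggregated results of the most recent evaluation run
results = load_dataset("open-llm-leaderboard/details_giraffe176__Open_Maid_Samantha_Hermes_Orca_dare_ties",
	"results",
	split="latest")
```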
## Latest results
These are the [latest results from run 2024-02-17T05:56:05.382821](https://huggingface.co/datasets/open-llm-leaderboard/details_giraffe176__Open_Maid_Samantha_Hermes_Orca_dare_ties/blob/main/results_2024-02-17T05-56-05.382821.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6492929541395905,
"acc_stderr": 0.03204314290781419,
"acc_norm": 0.6502356687496237,
"acc_norm_stderr": 0.032693608758353816,
"mc1": 0.41003671970624234,
"mc1_stderr": 0.017217844717449325,
"mc2": 0.5797198662912402,
"mc2_stderr": 0.015180976093776475
},
"harness|arc:challenge|25": {
"acc": 0.6390784982935154,
"acc_stderr": 0.014034761386175456,
"acc_norm": 0.6774744027303754,
"acc_norm_stderr": 0.013659980894277366
},
"harness|hellaswag|10": {
"acc": 0.6803425612427804,
"acc_stderr": 0.004653907471785644,
"acc_norm": 0.8638717386974706,
"acc_norm_stderr": 0.003422238702226359
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6,
"acc_stderr": 0.04232073695151589,
"acc_norm": 0.6,
"acc_norm_stderr": 0.04232073695151589
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6776315789473685,
"acc_stderr": 0.03803510248351585,
"acc_norm": 0.6776315789473685,
"acc_norm_stderr": 0.03803510248351585
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.6,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.6,
"acc_norm_stderr": 0.04923659639173309
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.7094339622641509,
"acc_stderr": 0.027943219989337128,
"acc_norm": 0.7094339622641509,
"acc_norm_stderr": 0.027943219989337128
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.75,
"acc_stderr": 0.03621034121889507,
"acc_norm": 0.75,
"acc_norm_stderr": 0.03621034121889507
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620332
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.51,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.51,
"acc_norm_stderr": 0.05024183937956912
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.653179190751445,
"acc_stderr": 0.036291466701596636,
"acc_norm": 0.653179190751445,
"acc_norm_stderr": 0.036291466701596636
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.38235294117647056,
"acc_stderr": 0.04835503696107224,
"acc_norm": 0.38235294117647056,
"acc_norm_stderr": 0.04835503696107224
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.76,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.76,
"acc_norm_stderr": 0.042923469599092816
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5787234042553191,
"acc_stderr": 0.03227834510146267,
"acc_norm": 0.5787234042553191,
"acc_norm_stderr": 0.03227834510146267
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5175438596491229,
"acc_stderr": 0.04700708033551038,
"acc_norm": 0.5175438596491229,
"acc_norm_stderr": 0.04700708033551038
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5517241379310345,
"acc_stderr": 0.04144311810878152,
"acc_norm": 0.5517241379310345,
"acc_norm_stderr": 0.04144311810878152
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.42063492063492064,
"acc_stderr": 0.025424835086924003,
"acc_norm": 0.42063492063492064,
"acc_norm_stderr": 0.025424835086924003
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.4603174603174603,
"acc_stderr": 0.04458029125470973,
"acc_norm": 0.4603174603174603,
"acc_norm_stderr": 0.04458029125470973
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7838709677419354,
"acc_stderr": 0.02341529343356852,
"acc_norm": 0.7838709677419354,
"acc_norm_stderr": 0.02341529343356852
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.5123152709359606,
"acc_stderr": 0.035169204442208966,
"acc_norm": 0.5123152709359606,
"acc_norm_stderr": 0.035169204442208966
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.69,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.69,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7878787878787878,
"acc_stderr": 0.03192271569548301,
"acc_norm": 0.7878787878787878,
"acc_norm_stderr": 0.03192271569548301
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7878787878787878,
"acc_stderr": 0.029126522834586818,
"acc_norm": 0.7878787878787878,
"acc_norm_stderr": 0.029126522834586818
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.9015544041450777,
"acc_stderr": 0.021500249576033456,
"acc_norm": 0.9015544041450777,
"acc_norm_stderr": 0.021500249576033456
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6641025641025641,
"acc_stderr": 0.02394672474156398,
"acc_norm": 0.6641025641025641,
"acc_norm_stderr": 0.02394672474156398
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.362962962962963,
"acc_stderr": 0.02931820364520686,
"acc_norm": 0.362962962962963,
"acc_norm_stderr": 0.02931820364520686
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.7016806722689075,
"acc_stderr": 0.02971914287634286,
"acc_norm": 0.7016806722689075,
"acc_norm_stderr": 0.02971914287634286
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.33774834437086093,
"acc_stderr": 0.03861557546255169,
"acc_norm": 0.33774834437086093,
"acc_norm_stderr": 0.03861557546255169
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8366972477064221,
"acc_stderr": 0.01584825580650155,
"acc_norm": 0.8366972477064221,
"acc_norm_stderr": 0.01584825580650155
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5092592592592593,
"acc_stderr": 0.034093869469927006,
"acc_norm": 0.5092592592592593,
"acc_norm_stderr": 0.034093869469927006
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8088235294117647,
"acc_stderr": 0.027599174300640766,
"acc_norm": 0.8088235294117647,
"acc_norm_stderr": 0.027599174300640766
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.8016877637130801,
"acc_stderr": 0.02595502084162113,
"acc_norm": 0.8016877637130801,
"acc_norm_stderr": 0.02595502084162113
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6860986547085202,
"acc_stderr": 0.031146796482972465,
"acc_norm": 0.6860986547085202,
"acc_norm_stderr": 0.031146796482972465
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7938931297709924,
"acc_stderr": 0.03547771004159465,
"acc_norm": 0.7938931297709924,
"acc_norm_stderr": 0.03547771004159465
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7768595041322314,
"acc_stderr": 0.03800754475228732,
"acc_norm": 0.7768595041322314,
"acc_norm_stderr": 0.03800754475228732
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7870370370370371,
"acc_stderr": 0.0395783547198098,
"acc_norm": 0.7870370370370371,
"acc_norm_stderr": 0.0395783547198098
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7730061349693251,
"acc_stderr": 0.03291099578615769,
"acc_norm": 0.7730061349693251,
"acc_norm_stderr": 0.03291099578615769
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.5089285714285714,
"acc_stderr": 0.04745033255489123,
"acc_norm": 0.5089285714285714,
"acc_norm_stderr": 0.04745033255489123
},
"harness|hendrycksTest-management|5": {
"acc": 0.7864077669902912,
"acc_stderr": 0.040580420156460344,
"acc_norm": 0.7864077669902912,
"acc_norm_stderr": 0.040580420156460344
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8675213675213675,
"acc_stderr": 0.022209309073165612,
"acc_norm": 0.8675213675213675,
"acc_norm_stderr": 0.022209309073165612
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.71,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.71,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8339719029374202,
"acc_stderr": 0.013306478243066302,
"acc_norm": 0.8339719029374202,
"acc_norm_stderr": 0.013306478243066302
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7398843930635838,
"acc_stderr": 0.023618678310069363,
"acc_norm": 0.7398843930635838,
"acc_norm_stderr": 0.023618678310069363
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.36201117318435755,
"acc_stderr": 0.016073067350153087,
"acc_norm": 0.36201117318435755,
"acc_norm_stderr": 0.016073067350153087
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7483660130718954,
"acc_stderr": 0.024848018263875195,
"acc_norm": 0.7483660130718954,
"acc_norm_stderr": 0.024848018263875195
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7202572347266881,
"acc_stderr": 0.025494259350694912,
"acc_norm": 0.7202572347266881,
"acc_norm_stderr": 0.025494259350694912
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7438271604938271,
"acc_stderr": 0.024288533637726095,
"acc_norm": 0.7438271604938271,
"acc_norm_stderr": 0.024288533637726095
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.4858156028368794,
"acc_stderr": 0.02981549448368206,
"acc_norm": 0.4858156028368794,
"acc_norm_stderr": 0.02981549448368206
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.470013037809648,
"acc_stderr": 0.012747248967079064,
"acc_norm": 0.470013037809648,
"acc_norm_stderr": 0.012747248967079064
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.7058823529411765,
"acc_stderr": 0.02767846864214472,
"acc_norm": 0.7058823529411765,
"acc_norm_stderr": 0.02767846864214472
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.673202614379085,
"acc_stderr": 0.01897542792050721,
"acc_norm": 0.673202614379085,
"acc_norm_stderr": 0.01897542792050721
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6909090909090909,
"acc_stderr": 0.044262946482000985,
"acc_norm": 0.6909090909090909,
"acc_norm_stderr": 0.044262946482000985
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7346938775510204,
"acc_stderr": 0.02826388994378459,
"acc_norm": 0.7346938775510204,
"acc_norm_stderr": 0.02826388994378459
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8656716417910447,
"acc_stderr": 0.02411267824090081,
"acc_norm": 0.8656716417910447,
"acc_norm_stderr": 0.02411267824090081
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.88,
"acc_stderr": 0.03265986323710906,
"acc_norm": 0.88,
"acc_norm_stderr": 0.03265986323710906
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5481927710843374,
"acc_stderr": 0.03874371556587953,
"acc_norm": 0.5481927710843374,
"acc_norm_stderr": 0.03874371556587953
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8421052631578947,
"acc_stderr": 0.027966785859160893,
"acc_norm": 0.8421052631578947,
"acc_norm_stderr": 0.027966785859160893
},
"harness|truthfulqa:mc|0": {
"mc1": 0.41003671970624234,
"mc1_stderr": 0.017217844717449325,
"mc2": 0.5797198662912402,
"mc2_stderr": 0.015180976093776475
},
"harness|winogrande|5": {
"acc": 0.8113654301499605,
"acc_stderr": 0.010995172318019811
},
"harness|gsm8k|5": {
"acc": 0.6535253980288097,
"acc_stderr": 0.013107179054313401
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | open-llm-leaderboard/details_giraffe176__Open_Maid_Samantha_Hermes_Orca_dare_ties | [
"region:us"
] | 2024-02-17T05:58:24+00:00 | {"pretty_name": "Evaluation run of giraffe176/Open_Maid_Samantha_Hermes_Orca_dare_ties", "dataset_summary": "Dataset automatically created during the evaluation run of model [giraffe176/Open_Maid_Samantha_Hermes_Orca_dare_ties](https://huggingface.co/giraffe176/Open_Maid_Samantha_Hermes_Orca_dare_ties) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_giraffe176__Open_Maid_Samantha_Hermes_Orca_dare_ties\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-17T05:56:05.382821](https://huggingface.co/datasets/open-llm-leaderboard/details_giraffe176__Open_Maid_Samantha_Hermes_Orca_dare_ties/blob/main/results_2024-02-17T05-56-05.382821.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6492929541395905,\n \"acc_stderr\": 0.03204314290781419,\n \"acc_norm\": 0.6502356687496237,\n \"acc_norm_stderr\": 0.032693608758353816,\n \"mc1\": 0.41003671970624234,\n \"mc1_stderr\": 0.017217844717449325,\n \"mc2\": 0.5797198662912402,\n \"mc2_stderr\": 0.015180976093776475\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6390784982935154,\n \"acc_stderr\": 0.014034761386175456,\n \"acc_norm\": 0.6774744027303754,\n \"acc_norm_stderr\": 0.013659980894277366\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6803425612427804,\n \"acc_stderr\": 0.004653907471785644,\n \"acc_norm\": 0.8638717386974706,\n \"acc_norm_stderr\": 0.003422238702226359\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04232073695151589,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04232073695151589\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6776315789473685,\n \"acc_stderr\": 0.03803510248351585,\n \"acc_norm\": 0.6776315789473685,\n \"acc_norm_stderr\": 0.03803510248351585\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7094339622641509,\n \"acc_stderr\": 0.027943219989337128,\n \"acc_norm\": 0.7094339622641509,\n \"acc_norm_stderr\": 0.027943219989337128\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.03621034121889507,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.03621034121889507\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.653179190751445,\n \"acc_stderr\": 0.036291466701596636,\n \"acc_norm\": 0.653179190751445,\n \"acc_norm_stderr\": 0.036291466701596636\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107224,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107224\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5787234042553191,\n \"acc_stderr\": 0.03227834510146267,\n \"acc_norm\": 0.5787234042553191,\n \"acc_norm_stderr\": 0.03227834510146267\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5175438596491229,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.5175438596491229,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5517241379310345,\n \"acc_stderr\": 0.04144311810878152,\n \"acc_norm\": 0.5517241379310345,\n \"acc_norm_stderr\": 0.04144311810878152\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.025424835086924003,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.025424835086924003\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7838709677419354,\n \"acc_stderr\": 0.02341529343356852,\n \"acc_norm\": 0.7838709677419354,\n \"acc_norm_stderr\": 0.02341529343356852\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5123152709359606,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.5123152709359606,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.03192271569548301,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.03192271569548301\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.029126522834586818,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.029126522834586818\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.021500249576033456,\n \"acc_norm\": 0.9015544041450777,\n 
\"acc_norm_stderr\": 0.021500249576033456\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6641025641025641,\n \"acc_stderr\": 0.02394672474156398,\n \"acc_norm\": 0.6641025641025641,\n \"acc_norm_stderr\": 0.02394672474156398\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.362962962962963,\n \"acc_stderr\": 0.02931820364520686,\n \"acc_norm\": 0.362962962962963,\n \"acc_norm_stderr\": 0.02931820364520686\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7016806722689075,\n \"acc_stderr\": 0.02971914287634286,\n \"acc_norm\": 0.7016806722689075,\n \"acc_norm_stderr\": 0.02971914287634286\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.03861557546255169,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.03861557546255169\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8366972477064221,\n \"acc_stderr\": 0.01584825580650155,\n \"acc_norm\": 0.8366972477064221,\n \"acc_norm_stderr\": 0.01584825580650155\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5092592592592593,\n \"acc_stderr\": 0.034093869469927006,\n \"acc_norm\": 0.5092592592592593,\n \"acc_norm_stderr\": 0.034093869469927006\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8088235294117647,\n \"acc_stderr\": 0.027599174300640766,\n \"acc_norm\": 0.8088235294117647,\n \"acc_norm_stderr\": 0.027599174300640766\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8016877637130801,\n \"acc_stderr\": 0.02595502084162113,\n \"acc_norm\": 0.8016877637130801,\n \"acc_norm_stderr\": 0.02595502084162113\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7938931297709924,\n \"acc_stderr\": 0.03547771004159465,\n \"acc_norm\": 0.7938931297709924,\n \"acc_norm_stderr\": 0.03547771004159465\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228732,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228732\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.0395783547198098,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.0395783547198098\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7730061349693251,\n \"acc_stderr\": 0.03291099578615769,\n \"acc_norm\": 0.7730061349693251,\n \"acc_norm_stderr\": 0.03291099578615769\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5089285714285714,\n \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.5089285714285714,\n \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.040580420156460344,\n \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.040580420156460344\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8675213675213675,\n \"acc_stderr\": 0.022209309073165612,\n \"acc_norm\": 0.8675213675213675,\n \"acc_norm_stderr\": 0.022209309073165612\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8339719029374202,\n \"acc_stderr\": 0.013306478243066302,\n \"acc_norm\": 0.8339719029374202,\n \"acc_norm_stderr\": 0.013306478243066302\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7398843930635838,\n \"acc_stderr\": 0.023618678310069363,\n \"acc_norm\": 0.7398843930635838,\n \"acc_norm_stderr\": 0.023618678310069363\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.36201117318435755,\n \"acc_stderr\": 0.016073067350153087,\n \"acc_norm\": 0.36201117318435755,\n \"acc_norm_stderr\": 0.016073067350153087\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7483660130718954,\n \"acc_stderr\": 0.024848018263875195,\n \"acc_norm\": 0.7483660130718954,\n \"acc_norm_stderr\": 0.024848018263875195\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7202572347266881,\n \"acc_stderr\": 0.025494259350694912,\n \"acc_norm\": 0.7202572347266881,\n \"acc_norm_stderr\": 0.025494259350694912\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7438271604938271,\n \"acc_stderr\": 0.024288533637726095,\n \"acc_norm\": 0.7438271604938271,\n \"acc_norm_stderr\": 0.024288533637726095\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4858156028368794,\n \"acc_stderr\": 0.02981549448368206,\n \"acc_norm\": 0.4858156028368794,\n \"acc_norm_stderr\": 0.02981549448368206\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.470013037809648,\n \"acc_stderr\": 0.012747248967079064,\n \"acc_norm\": 0.470013037809648,\n \"acc_norm_stderr\": 0.012747248967079064\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7058823529411765,\n \"acc_stderr\": 0.02767846864214472,\n \"acc_norm\": 0.7058823529411765,\n \"acc_norm_stderr\": 0.02767846864214472\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.673202614379085,\n \"acc_stderr\": 0.01897542792050721,\n \"acc_norm\": 0.673202614379085,\n \"acc_norm_stderr\": 0.01897542792050721\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7346938775510204,\n \"acc_stderr\": 0.02826388994378459,\n \"acc_norm\": 0.7346938775510204,\n \"acc_norm_stderr\": 0.02826388994378459\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8656716417910447,\n \"acc_stderr\": 0.02411267824090081,\n \"acc_norm\": 0.8656716417910447,\n \"acc_norm_stderr\": 0.02411267824090081\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.88,\n \"acc_stderr\": 0.03265986323710906,\n \"acc_norm\": 0.88,\n \"acc_norm_stderr\": 0.03265986323710906\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5481927710843374,\n \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.5481927710843374,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8421052631578947,\n \"acc_stderr\": 0.027966785859160893,\n \"acc_norm\": 0.8421052631578947,\n \"acc_norm_stderr\": 0.027966785859160893\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.41003671970624234,\n \"mc1_stderr\": 0.017217844717449325,\n \"mc2\": 0.5797198662912402,\n \"mc2_stderr\": 0.015180976093776475\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8113654301499605,\n \"acc_stderr\": 0.010995172318019811\n },\n \"harness|gsm8k|5\": {\n \"acc\": 
0.6535253980288097,\n \"acc_stderr\": 0.013107179054313401\n }\n}\n```", "repo_url": "https://huggingface.co/giraffe176/Open_Maid_Samantha_Hermes_Orca_dare_ties", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "clementine@hf.co", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|arc:challenge|25_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|gsm8k|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hellaswag|10_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T05-56-05.382821.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-17T05-56-05.382821.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T05-56-05.382821.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-17T05-56-05.382821.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T05-56-05.382821.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["**/details_harness|winogrande|5_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-02-17T05-56-05.382821.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_17T05_56_05.382821", "path": ["results_2024-02-17T05-56-05.382821.parquet"]}, {"split": "latest", "path": ["results_2024-02-17T05-56-05.382821.parquet"]}]}]} | 2024-02-17T05:58:44+00:00 |
34cfd3106f16ffc3c21617cbe60b37da9c1abc64 | GroundCtrl/ColonoV3 | [
"license:openrail",
"region:us"
] | 2024-02-17T06:02:53+00:00 | {"license": "openrail"} | 2024-02-17T06:07:45+00:00 |
|
ea0273079f0e28994feec28fcd832839a61bb460 | ajsmith/ala2 | [
"license:mit",
"region:us"
] | 2024-02-17T06:02:55+00:00 | {"license": "mit"} | 2024-02-17T06:02:55+00:00 |
|
bc77c550cf1aef5d2f0645e048826f1c05bb68e5 |
# Dataset Card for Evaluation run of macadeliccc/SmaugDolphin-60B
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [macadeliccc/SmaugDolphin-60B](https://huggingface.co/macadeliccc/SmaugDolphin-60B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_macadeliccc__SmaugDolphin-60B",
"harness_winogrande_5",
split="train")
```
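
The aggregated metrics can be loaded the same way. This is a minimal sketch, assuming the "results" configuration and "latest" split listed in this card's metadata:

```python
from datasets import load_dataset

# Aggregated metrics for each run; the "latest" split points to the
# most recent evaluation of this model.
results = load_dataset(
    "open-llm-leaderboard/details_macadeliccc__SmaugDolphin-60B",
    "results",
    split="latest",
)
print(results[0])
```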
## Latest results
These are the [latest results from run 2024-02-17T06:01:33.258417](https://huggingface.co/datasets/open-llm-leaderboard/details_macadeliccc__SmaugDolphin-60B/blob/main/results_2024-02-17T06-01-33.258417.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.7651915724176592,
"acc_stderr": 0.02813245689172726,
"acc_norm": 0.7689266432773564,
"acc_norm_stderr": 0.02866672384399442,
"mc1": 0.5006119951040392,
"mc1_stderr": 0.01750348793889251,
"mc2": 0.6743783536659786,
"mc2_stderr": 0.014425170450824752
},
"harness|arc:challenge|25": {
"acc": 0.697098976109215,
"acc_stderr": 0.013428241573185349,
"acc_norm": 0.7337883959044369,
"acc_norm_stderr": 0.012915774781523217
},
"harness|hellaswag|10": {
"acc": 0.6674965146385182,
"acc_stderr": 0.004701474865207032,
"acc_norm": 0.8654650468034256,
"acc_norm_stderr": 0.003405288007233201
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.47,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.47,
"acc_norm_stderr": 0.05016135580465919
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.7333333333333333,
"acc_stderr": 0.038201699145179055,
"acc_norm": 0.7333333333333333,
"acc_norm_stderr": 0.038201699145179055
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.875,
"acc_stderr": 0.026913523521537846,
"acc_norm": 0.875,
"acc_norm_stderr": 0.026913523521537846
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.78,
"acc_stderr": 0.04163331998932262,
"acc_norm": 0.78,
"acc_norm_stderr": 0.04163331998932262
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.7924528301886793,
"acc_stderr": 0.02495991802891127,
"acc_norm": 0.7924528301886793,
"acc_norm_stderr": 0.02495991802891127
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.9027777777777778,
"acc_stderr": 0.024774516250440182,
"acc_norm": 0.9027777777777778,
"acc_norm_stderr": 0.024774516250440182
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.51,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.51,
"acc_norm_stderr": 0.05024183937956912
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.62,
"acc_stderr": 0.04878317312145633,
"acc_norm": 0.62,
"acc_norm_stderr": 0.04878317312145633
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.7514450867052023,
"acc_stderr": 0.03295304696818317,
"acc_norm": 0.7514450867052023,
"acc_norm_stderr": 0.03295304696818317
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.5980392156862745,
"acc_stderr": 0.04878608714466996,
"acc_norm": 0.5980392156862745,
"acc_norm_stderr": 0.04878608714466996
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.79,
"acc_stderr": 0.04093601807403326,
"acc_norm": 0.79,
"acc_norm_stderr": 0.04093601807403326
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.7574468085106383,
"acc_stderr": 0.02802022627120022,
"acc_norm": 0.7574468085106383,
"acc_norm_stderr": 0.02802022627120022
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5964912280701754,
"acc_stderr": 0.04615186962583707,
"acc_norm": 0.5964912280701754,
"acc_norm_stderr": 0.04615186962583707
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.7517241379310344,
"acc_stderr": 0.036001056927277696,
"acc_norm": 0.7517241379310344,
"acc_norm_stderr": 0.036001056927277696
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.7275132275132276,
"acc_stderr": 0.022930973071633363,
"acc_norm": 0.7275132275132276,
"acc_norm_stderr": 0.022930973071633363
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.5793650793650794,
"acc_stderr": 0.04415438226743745,
"acc_norm": 0.5793650793650794,
"acc_norm_stderr": 0.04415438226743745
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.59,
"acc_stderr": 0.04943110704237102,
"acc_norm": 0.59,
"acc_norm_stderr": 0.04943110704237102
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.896774193548387,
"acc_stderr": 0.01730838128103452,
"acc_norm": 0.896774193548387,
"acc_norm_stderr": 0.01730838128103452
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.6798029556650246,
"acc_stderr": 0.032826493853041504,
"acc_norm": 0.6798029556650246,
"acc_norm_stderr": 0.032826493853041504
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.79,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.79,
"acc_norm_stderr": 0.040936018074033256
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.8727272727272727,
"acc_stderr": 0.026024657651656177,
"acc_norm": 0.8727272727272727,
"acc_norm_stderr": 0.026024657651656177
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.9292929292929293,
"acc_stderr": 0.01826310542019949,
"acc_norm": 0.9292929292929293,
"acc_norm_stderr": 0.01826310542019949
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.9844559585492227,
"acc_stderr": 0.008927492715084329,
"acc_norm": 0.9844559585492227,
"acc_norm_stderr": 0.008927492715084329
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.8128205128205128,
"acc_stderr": 0.019776601086550032,
"acc_norm": 0.8128205128205128,
"acc_norm_stderr": 0.019776601086550032
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.44814814814814813,
"acc_stderr": 0.030321167196316286,
"acc_norm": 0.44814814814814813,
"acc_norm_stderr": 0.030321167196316286
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.8697478991596639,
"acc_stderr": 0.021863258494852107,
"acc_norm": 0.8697478991596639,
"acc_norm_stderr": 0.021863258494852107
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.4900662251655629,
"acc_stderr": 0.04081677107248436,
"acc_norm": 0.4900662251655629,
"acc_norm_stderr": 0.04081677107248436
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.9137614678899083,
"acc_stderr": 0.012035597300116241,
"acc_norm": 0.9137614678899083,
"acc_norm_stderr": 0.012035597300116241
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.6388888888888888,
"acc_stderr": 0.032757734861009996,
"acc_norm": 0.6388888888888888,
"acc_norm_stderr": 0.032757734861009996
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.9313725490196079,
"acc_stderr": 0.017744453647073322,
"acc_norm": 0.9313725490196079,
"acc_norm_stderr": 0.017744453647073322
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.9071729957805907,
"acc_stderr": 0.01888975055095671,
"acc_norm": 0.9071729957805907,
"acc_norm_stderr": 0.01888975055095671
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.8161434977578476,
"acc_stderr": 0.025998379092356517,
"acc_norm": 0.8161434977578476,
"acc_norm_stderr": 0.025998379092356517
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.9007633587786259,
"acc_stderr": 0.026222235171477374,
"acc_norm": 0.9007633587786259,
"acc_norm_stderr": 0.026222235171477374
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.8760330578512396,
"acc_stderr": 0.030083098716035216,
"acc_norm": 0.8760330578512396,
"acc_norm_stderr": 0.030083098716035216
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.8796296296296297,
"acc_stderr": 0.031457038543062504,
"acc_norm": 0.8796296296296297,
"acc_norm_stderr": 0.031457038543062504
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.852760736196319,
"acc_stderr": 0.027839915278339653,
"acc_norm": 0.852760736196319,
"acc_norm_stderr": 0.027839915278339653
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.5714285714285714,
"acc_stderr": 0.04697113923010213,
"acc_norm": 0.5714285714285714,
"acc_norm_stderr": 0.04697113923010213
},
"harness|hendrycksTest-management|5": {
"acc": 0.8640776699029126,
"acc_stderr": 0.0339329572976101,
"acc_norm": 0.8640776699029126,
"acc_norm_stderr": 0.0339329572976101
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.9358974358974359,
"acc_stderr": 0.016046261631673137,
"acc_norm": 0.9358974358974359,
"acc_norm_stderr": 0.016046261631673137
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.91,
"acc_stderr": 0.028762349126466125,
"acc_norm": 0.91,
"acc_norm_stderr": 0.028762349126466125
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.9106002554278416,
"acc_stderr": 0.0102030178476883,
"acc_norm": 0.9106002554278416,
"acc_norm_stderr": 0.0102030178476883
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.8208092485549133,
"acc_stderr": 0.020647590029679332,
"acc_norm": 0.8208092485549133,
"acc_norm_stderr": 0.020647590029679332
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.7955307262569833,
"acc_stderr": 0.013488813404711917,
"acc_norm": 0.7955307262569833,
"acc_norm_stderr": 0.013488813404711917
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.8496732026143791,
"acc_stderr": 0.020464175124332625,
"acc_norm": 0.8496732026143791,
"acc_norm_stderr": 0.020464175124332625
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7942122186495176,
"acc_stderr": 0.022961339906764244,
"acc_norm": 0.7942122186495176,
"acc_norm_stderr": 0.022961339906764244
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.8827160493827161,
"acc_stderr": 0.017903112615281127,
"acc_norm": 0.8827160493827161,
"acc_norm_stderr": 0.017903112615281127
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.6418439716312057,
"acc_stderr": 0.028602085862759422,
"acc_norm": 0.6418439716312057,
"acc_norm_stderr": 0.028602085862759422
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.5853976531942634,
"acc_stderr": 0.012582597058908284,
"acc_norm": 0.5853976531942634,
"acc_norm_stderr": 0.012582597058908284
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.8345588235294118,
"acc_stderr": 0.022571771025494746,
"acc_norm": 0.8345588235294118,
"acc_norm_stderr": 0.022571771025494746
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.8169934640522876,
"acc_stderr": 0.015643069911273344,
"acc_norm": 0.8169934640522876,
"acc_norm_stderr": 0.015643069911273344
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.7,
"acc_stderr": 0.04389311454644287,
"acc_norm": 0.7,
"acc_norm_stderr": 0.04389311454644287
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.8489795918367347,
"acc_stderr": 0.022923004094736854,
"acc_norm": 0.8489795918367347,
"acc_norm_stderr": 0.022923004094736854
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8955223880597015,
"acc_stderr": 0.021628920516700643,
"acc_norm": 0.8955223880597015,
"acc_norm_stderr": 0.021628920516700643
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.91,
"acc_stderr": 0.028762349126466125,
"acc_norm": 0.91,
"acc_norm_stderr": 0.028762349126466125
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5843373493975904,
"acc_stderr": 0.03836722176598053,
"acc_norm": 0.5843373493975904,
"acc_norm_stderr": 0.03836722176598053
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8888888888888888,
"acc_stderr": 0.024103384202072864,
"acc_norm": 0.8888888888888888,
"acc_norm_stderr": 0.024103384202072864
},
"harness|truthfulqa:mc|0": {
"mc1": 0.5006119951040392,
"mc1_stderr": 0.01750348793889251,
"mc2": 0.6743783536659786,
"mc2_stderr": 0.014425170450824752
},
"harness|winogrande|5": {
"acc": 0.835043409629045,
"acc_stderr": 0.010430917468237428
},
"harness|gsm8k|5": {
"acc": 0.709628506444276,
"acc_stderr": 0.012503592481818954
}
}
```
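
The same figures can also be read straight from the per-run JSON file linked above. A minimal sketch using `huggingface_hub` (the filename is taken from the link; the exact layout of the file may vary, so this only inspects it):

```python
import json
from huggingface_hub import hf_hub_download

# Fetch the raw per-run results file from the dataset repository.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/details_macadeliccc__SmaugDolphin-60B",
    filename="results_2024-02-17T06-01-33.258417.json",
    repo_type="dataset",
)

with open(path) as f:
    raw = json.load(f)

# The block shown above is expected to sit under a "results" key,
# but inspect the top-level keys first since the layout may vary.
print(list(raw.keys()))
```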
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | open-llm-leaderboard/details_macadeliccc__SmaugDolphin-60B | [
"region:us"
] | 2024-02-17T06:03:46+00:00 | {"pretty_name": "Evaluation run of macadeliccc/SmaugDolphin-60B", "dataset_summary": "Dataset automatically created during the evaluation run of model [macadeliccc/SmaugDolphin-60B](https://huggingface.co/macadeliccc/SmaugDolphin-60B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_macadeliccc__SmaugDolphin-60B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-17T06:01:33.258417](https://huggingface.co/datasets/open-llm-leaderboard/details_macadeliccc__SmaugDolphin-60B/blob/main/results_2024-02-17T06-01-33.258417.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7651915724176592,\n \"acc_stderr\": 0.02813245689172726,\n \"acc_norm\": 0.7689266432773564,\n \"acc_norm_stderr\": 0.02866672384399442,\n \"mc1\": 0.5006119951040392,\n \"mc1_stderr\": 0.01750348793889251,\n \"mc2\": 0.6743783536659786,\n \"mc2_stderr\": 0.014425170450824752\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.697098976109215,\n \"acc_stderr\": 0.013428241573185349,\n \"acc_norm\": 0.7337883959044369,\n \"acc_norm_stderr\": 0.012915774781523217\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6674965146385182,\n \"acc_stderr\": 0.004701474865207032,\n \"acc_norm\": 0.8654650468034256,\n \"acc_norm_stderr\": 0.003405288007233201\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.7333333333333333,\n \"acc_stderr\": 0.038201699145179055,\n \"acc_norm\": 0.7333333333333333,\n \"acc_norm_stderr\": 0.038201699145179055\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.875,\n \"acc_stderr\": 0.026913523521537846,\n \"acc_norm\": 0.875,\n \"acc_norm_stderr\": 0.026913523521537846\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932262,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932262\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7924528301886793,\n \"acc_stderr\": 0.02495991802891127,\n \"acc_norm\": 0.7924528301886793,\n \"acc_norm_stderr\": 0.02495991802891127\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.9027777777777778,\n \"acc_stderr\": 0.024774516250440182,\n \"acc_norm\": 0.9027777777777778,\n \"acc_norm_stderr\": 0.024774516250440182\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n 
\"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.04878317312145633,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.04878317312145633\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7514450867052023,\n \"acc_stderr\": 0.03295304696818317,\n \"acc_norm\": 0.7514450867052023,\n \"acc_norm_stderr\": 0.03295304696818317\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.5980392156862745,\n \"acc_stderr\": 0.04878608714466996,\n \"acc_norm\": 0.5980392156862745,\n \"acc_norm_stderr\": 0.04878608714466996\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.04093601807403326,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.04093601807403326\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.7574468085106383,\n \"acc_stderr\": 0.02802022627120022,\n \"acc_norm\": 0.7574468085106383,\n \"acc_norm_stderr\": 0.02802022627120022\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5964912280701754,\n \"acc_stderr\": 0.04615186962583707,\n \"acc_norm\": 0.5964912280701754,\n \"acc_norm_stderr\": 0.04615186962583707\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.7517241379310344,\n \"acc_stderr\": 0.036001056927277696,\n \"acc_norm\": 0.7517241379310344,\n \"acc_norm_stderr\": 0.036001056927277696\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.7275132275132276,\n \"acc_stderr\": 0.022930973071633363,\n \"acc_norm\": 0.7275132275132276,\n \"acc_norm_stderr\": 0.022930973071633363\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5793650793650794,\n \"acc_stderr\": 0.04415438226743745,\n \"acc_norm\": 0.5793650793650794,\n \"acc_norm_stderr\": 0.04415438226743745\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.896774193548387,\n \"acc_stderr\": 0.01730838128103452,\n \"acc_norm\": 0.896774193548387,\n \"acc_norm_stderr\": 0.01730838128103452\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.6798029556650246,\n \"acc_stderr\": 0.032826493853041504,\n \"acc_norm\": 0.6798029556650246,\n \"acc_norm_stderr\": 0.032826493853041504\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8727272727272727,\n \"acc_stderr\": 0.026024657651656177,\n \"acc_norm\": 0.8727272727272727,\n \"acc_norm_stderr\": 0.026024657651656177\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.9292929292929293,\n \"acc_stderr\": 0.01826310542019949,\n \"acc_norm\": 0.9292929292929293,\n \"acc_norm_stderr\": 0.01826310542019949\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9844559585492227,\n \"acc_stderr\": 0.008927492715084329,\n \"acc_norm\": 0.9844559585492227,\n \"acc_norm_stderr\": 0.008927492715084329\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.8128205128205128,\n \"acc_stderr\": 0.019776601086550032,\n \"acc_norm\": 0.8128205128205128,\n \"acc_norm_stderr\": 0.019776601086550032\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.44814814814814813,\n \"acc_stderr\": 0.030321167196316286,\n \"acc_norm\": 0.44814814814814813,\n \"acc_norm_stderr\": 0.030321167196316286\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.8697478991596639,\n \"acc_stderr\": 0.021863258494852107,\n \"acc_norm\": 0.8697478991596639,\n \"acc_norm_stderr\": 0.021863258494852107\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.4900662251655629,\n \"acc_stderr\": 0.04081677107248436,\n \"acc_norm\": 0.4900662251655629,\n \"acc_norm_stderr\": 0.04081677107248436\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.9137614678899083,\n \"acc_stderr\": 0.012035597300116241,\n \"acc_norm\": 0.9137614678899083,\n \"acc_norm_stderr\": 0.012035597300116241\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.6388888888888888,\n \"acc_stderr\": 0.032757734861009996,\n \"acc_norm\": 0.6388888888888888,\n \"acc_norm_stderr\": 0.032757734861009996\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.9313725490196079,\n \"acc_stderr\": 0.017744453647073322,\n \"acc_norm\": 0.9313725490196079,\n \"acc_norm_stderr\": 0.017744453647073322\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.9071729957805907,\n \"acc_stderr\": 0.01888975055095671,\n \"acc_norm\": 0.9071729957805907,\n \"acc_norm_stderr\": 0.01888975055095671\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.8161434977578476,\n \"acc_stderr\": 0.025998379092356517,\n \"acc_norm\": 0.8161434977578476,\n \"acc_norm_stderr\": 0.025998379092356517\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.9007633587786259,\n \"acc_stderr\": 0.026222235171477374,\n \"acc_norm\": 0.9007633587786259,\n \"acc_norm_stderr\": 0.026222235171477374\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8760330578512396,\n \"acc_stderr\": 0.030083098716035216,\n \"acc_norm\": 0.8760330578512396,\n \"acc_norm_stderr\": 0.030083098716035216\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8796296296296297,\n \"acc_stderr\": 0.031457038543062504,\n \"acc_norm\": 0.8796296296296297,\n \"acc_norm_stderr\": 0.031457038543062504\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.852760736196319,\n \"acc_stderr\": 0.027839915278339653,\n \"acc_norm\": 0.852760736196319,\n \"acc_norm_stderr\": 0.027839915278339653\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5714285714285714,\n \"acc_stderr\": 0.04697113923010213,\n \"acc_norm\": 0.5714285714285714,\n \"acc_norm_stderr\": 0.04697113923010213\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8640776699029126,\n \"acc_stderr\": 0.0339329572976101,\n \"acc_norm\": 0.8640776699029126,\n \"acc_norm_stderr\": 0.0339329572976101\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9358974358974359,\n \"acc_stderr\": 0.016046261631673137,\n \"acc_norm\": 0.9358974358974359,\n \"acc_norm_stderr\": 0.016046261631673137\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.91,\n \"acc_stderr\": 0.028762349126466125,\n \"acc_norm\": 0.91,\n \"acc_norm_stderr\": 0.028762349126466125\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.9106002554278416,\n \"acc_stderr\": 0.0102030178476883,\n 
\"acc_norm\": 0.9106002554278416,\n \"acc_norm_stderr\": 0.0102030178476883\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.8208092485549133,\n \"acc_stderr\": 0.020647590029679332,\n \"acc_norm\": 0.8208092485549133,\n \"acc_norm_stderr\": 0.020647590029679332\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.7955307262569833,\n \"acc_stderr\": 0.013488813404711917,\n \"acc_norm\": 0.7955307262569833,\n \"acc_norm_stderr\": 0.013488813404711917\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.8496732026143791,\n \"acc_stderr\": 0.020464175124332625,\n \"acc_norm\": 0.8496732026143791,\n \"acc_norm_stderr\": 0.020464175124332625\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7942122186495176,\n \"acc_stderr\": 0.022961339906764244,\n \"acc_norm\": 0.7942122186495176,\n \"acc_norm_stderr\": 0.022961339906764244\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.8827160493827161,\n \"acc_stderr\": 0.017903112615281127,\n \"acc_norm\": 0.8827160493827161,\n \"acc_norm_stderr\": 0.017903112615281127\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.6418439716312057,\n \"acc_stderr\": 0.028602085862759422,\n \"acc_norm\": 0.6418439716312057,\n \"acc_norm_stderr\": 0.028602085862759422\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5853976531942634,\n \"acc_stderr\": 0.012582597058908284,\n \"acc_norm\": 0.5853976531942634,\n \"acc_norm_stderr\": 0.012582597058908284\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.8345588235294118,\n \"acc_stderr\": 0.022571771025494746,\n \"acc_norm\": 0.8345588235294118,\n \"acc_norm_stderr\": 0.022571771025494746\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.8169934640522876,\n \"acc_stderr\": 0.015643069911273344,\n \"acc_norm\": 0.8169934640522876,\n \"acc_norm_stderr\": 0.015643069911273344\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.04389311454644287,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.04389311454644287\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8489795918367347,\n \"acc_stderr\": 0.022923004094736854,\n \"acc_norm\": 0.8489795918367347,\n \"acc_norm_stderr\": 0.022923004094736854\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8955223880597015,\n \"acc_stderr\": 0.021628920516700643,\n \"acc_norm\": 0.8955223880597015,\n \"acc_norm_stderr\": 0.021628920516700643\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.91,\n \"acc_stderr\": 0.028762349126466125,\n \"acc_norm\": 0.91,\n \"acc_norm_stderr\": 0.028762349126466125\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5843373493975904,\n \"acc_stderr\": 0.03836722176598053,\n \"acc_norm\": 0.5843373493975904,\n \"acc_norm_stderr\": 0.03836722176598053\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.024103384202072864,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.024103384202072864\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5006119951040392,\n \"mc1_stderr\": 0.01750348793889251,\n \"mc2\": 0.6743783536659786,\n \"mc2_stderr\": 0.014425170450824752\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.835043409629045,\n \"acc_stderr\": 0.010430917468237428\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.709628506444276,\n \"acc_stderr\": 0.012503592481818954\n }\n}\n```", "repo_url": "https://huggingface.co/macadeliccc/SmaugDolphin-60B", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "clementine@hf.co", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|arc:challenge|25_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|gsm8k|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hellaswag|10_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T06-01-33.258417.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T06-01-33.258417.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-17T06-01-33.258417.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-17T06-01-33.258417.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T06-01-33.258417.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T06-01-33.258417.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["**/details_harness|winogrande|5_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-17T06-01-33.258417.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_02_17T06_01_33.258417", "path": ["results_2024-02-17T06-01-33.258417.parquet"]}, {"split": "latest", "path": 
["results_2024-02-17T06-01-33.258417.parquet"]}]}]} | 2024-02-17T06:04:08+00:00 |
41507c68fb26dd48f54b1841655da9750ce70c91 | Atipico1/nq-test-adv-replaced | [
"region:us"
] | 2024-02-17T06:06:21+00:00 | {"dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "entity", "dtype": "string"}, {"name": "similar_entity", "dtype": "string"}, {"name": "answers", "sequence": "string"}, {"name": "ctxs", "list": [{"name": "hasanswer", "dtype": "bool"}, {"name": "score", "dtype": "float64"}, {"name": "text", "dtype": "string"}, {"name": "title", "dtype": "string"}]}, {"name": "masked_query", "dtype": "string"}, {"name": "original_case", "list": [{"name": "answer", "dtype": "string"}, {"name": "context", "dtype": "string"}, {"name": "distance", "dtype": "string"}, {"name": "original_answers", "sequence": "string"}, {"name": "question", "dtype": "string"}]}, {"name": "unans_case", "list": [{"name": "answer", "dtype": "string"}, {"name": "answers", "sequence": "string"}, {"name": "context", "dtype": "string"}, {"name": "distance", "dtype": "string"}, {"name": "original_answers", "sequence": "string"}, {"name": "question", "dtype": "string"}]}, {"name": "conflict_case", "list": [{"name": "answer", "dtype": "string"}, {"name": "conflict_context", "dtype": "string"}, {"name": "context", "dtype": "string"}, {"name": "distance", "dtype": "string"}, {"name": "original_answers", "sequence": "string"}, {"name": "question", "dtype": "string"}]}, {"name": "context", "dtype": "string"}, {"name": "context_vague", "dtype": "string"}, {"name": "entities", "dtype": "string"}, {"name": "entities_count", "dtype": "int64"}, {"name": "adv_sent", "dtype": "string"}, {"name": "adv_passage", "dtype": "string"}, {"name": "hasanswer", "dtype": "bool"}, {"name": "is_adversarial", "dtype": "bool"}], "splits": [{"name": "test", "num_bytes": 58003429, "num_examples": 3610}], "download_size": 33899916, "dataset_size": 58003429}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}]}]} | 2024-02-17T06:06:29+00:00 |
|
d4020a57e34373d70476747119c1ed4861b90eb1 |
# Dataset Card for Evaluation run of fzzhang/toten_gsm8k_merged_s
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [fzzhang/toten_gsm8k_merged_s](https://huggingface.co/fzzhang/toten_gsm8k_merged_s) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_fzzhang__toten_gsm8k_merged_s",
"harness_winogrande_5",
split="train")
```
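
If you prefer the aggregated scores rather than the per-sample details, the same repository also exposes a `results` configuration with a `latest` split (an assumption based on the configuration layout shared by these leaderboard runs); a minimal sketch:

```python
from datasets import load_dataset

# Minimal sketch: the "results" configuration aggregates the per-task scores;
# the "latest" split points at the most recent results parquet for this model.
results = load_dataset(
    "open-llm-leaderboard/details_fzzhang__toten_gsm8k_merged_s",
    "results",
    split="latest",
)
print(results[0])  # one row with the aggregated metrics of the latest run
```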
## Latest results
These are the [latest results from run 2024-02-17T06:12:40.059000](https://huggingface.co/datasets/open-llm-leaderboard/details_fzzhang__toten_gsm8k_merged_s/blob/main/results_2024-02-17T06-12-40.059000.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6303593896054516,
"acc_stderr": 0.032532314502917783,
"acc_norm": 0.6323776185375855,
"acc_norm_stderr": 0.033183029878923596,
"mc1": 0.3818849449204406,
"mc1_stderr": 0.01700810193916349,
"mc2": 0.5491954242070916,
"mc2_stderr": 0.015160040858276722
},
"harness|arc:challenge|25": {
"acc": 0.6100682593856656,
"acc_stderr": 0.014252959848892896,
"acc_norm": 0.6527303754266212,
"acc_norm_stderr": 0.01391303452962045
},
"harness|hellaswag|10": {
"acc": 0.6602270464050985,
"acc_stderr": 0.00472664053256204,
"acc_norm": 0.847042421828321,
"acc_norm_stderr": 0.0035921097436286175
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695236,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695236
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.5703703703703704,
"acc_stderr": 0.042763494943766,
"acc_norm": 0.5703703703703704,
"acc_norm_stderr": 0.042763494943766
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6842105263157895,
"acc_stderr": 0.0378272898086547,
"acc_norm": 0.6842105263157895,
"acc_norm_stderr": 0.0378272898086547
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.6,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.6,
"acc_norm_stderr": 0.04923659639173309
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6754716981132075,
"acc_stderr": 0.02881561571343211,
"acc_norm": 0.6754716981132075,
"acc_norm_stderr": 0.02881561571343211
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7430555555555556,
"acc_stderr": 0.03653946969442099,
"acc_norm": 0.7430555555555556,
"acc_norm_stderr": 0.03653946969442099
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.44,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.44,
"acc_norm_stderr": 0.04988876515698589
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956912
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.29,
"acc_stderr": 0.04560480215720683,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720683
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6763005780346821,
"acc_stderr": 0.0356760379963917,
"acc_norm": 0.6763005780346821,
"acc_norm_stderr": 0.0356760379963917
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.4411764705882353,
"acc_stderr": 0.049406356306056595,
"acc_norm": 0.4411764705882353,
"acc_norm_stderr": 0.049406356306056595
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.74,
"acc_stderr": 0.04408440022768078,
"acc_norm": 0.74,
"acc_norm_stderr": 0.04408440022768078
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5574468085106383,
"acc_stderr": 0.03246956919789958,
"acc_norm": 0.5574468085106383,
"acc_norm_stderr": 0.03246956919789958
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5263157894736842,
"acc_stderr": 0.046970851366478626,
"acc_norm": 0.5263157894736842,
"acc_norm_stderr": 0.046970851366478626
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5586206896551724,
"acc_stderr": 0.04137931034482758,
"acc_norm": 0.5586206896551724,
"acc_norm_stderr": 0.04137931034482758
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.41005291005291006,
"acc_stderr": 0.025331202438944444,
"acc_norm": 0.41005291005291006,
"acc_norm_stderr": 0.025331202438944444
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.40476190476190477,
"acc_stderr": 0.04390259265377562,
"acc_norm": 0.40476190476190477,
"acc_norm_stderr": 0.04390259265377562
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7548387096774194,
"acc_stderr": 0.02447224384089552,
"acc_norm": 0.7548387096774194,
"acc_norm_stderr": 0.02447224384089552
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.47783251231527096,
"acc_stderr": 0.03514528562175008,
"acc_norm": 0.47783251231527096,
"acc_norm_stderr": 0.03514528562175008
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.71,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.71,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7878787878787878,
"acc_stderr": 0.03192271569548301,
"acc_norm": 0.7878787878787878,
"acc_norm_stderr": 0.03192271569548301
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7676767676767676,
"acc_stderr": 0.030088629490217487,
"acc_norm": 0.7676767676767676,
"acc_norm_stderr": 0.030088629490217487
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8652849740932642,
"acc_stderr": 0.02463978909770944,
"acc_norm": 0.8652849740932642,
"acc_norm_stderr": 0.02463978909770944
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6256410256410256,
"acc_stderr": 0.0245375915728305,
"acc_norm": 0.6256410256410256,
"acc_norm_stderr": 0.0245375915728305
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.34814814814814815,
"acc_stderr": 0.029045600290616255,
"acc_norm": 0.34814814814814815,
"acc_norm_stderr": 0.029045600290616255
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6512605042016807,
"acc_stderr": 0.030956636328566548,
"acc_norm": 0.6512605042016807,
"acc_norm_stderr": 0.030956636328566548
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.304635761589404,
"acc_stderr": 0.03757949922943343,
"acc_norm": 0.304635761589404,
"acc_norm_stderr": 0.03757949922943343
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8238532110091743,
"acc_stderr": 0.016332882393431388,
"acc_norm": 0.8238532110091743,
"acc_norm_stderr": 0.016332882393431388
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.4722222222222222,
"acc_stderr": 0.0340470532865388,
"acc_norm": 0.4722222222222222,
"acc_norm_stderr": 0.0340470532865388
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.7941176470588235,
"acc_stderr": 0.028379449451588667,
"acc_norm": 0.7941176470588235,
"acc_norm_stderr": 0.028379449451588667
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7890295358649789,
"acc_stderr": 0.026558372502661916,
"acc_norm": 0.7890295358649789,
"acc_norm_stderr": 0.026558372502661916
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6502242152466368,
"acc_stderr": 0.03200736719484503,
"acc_norm": 0.6502242152466368,
"acc_norm_stderr": 0.03200736719484503
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7251908396946565,
"acc_stderr": 0.039153454088478354,
"acc_norm": 0.7251908396946565,
"acc_norm_stderr": 0.039153454088478354
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7768595041322314,
"acc_stderr": 0.03800754475228733,
"acc_norm": 0.7768595041322314,
"acc_norm_stderr": 0.03800754475228733
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7592592592592593,
"acc_stderr": 0.04133119440243838,
"acc_norm": 0.7592592592592593,
"acc_norm_stderr": 0.04133119440243838
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7361963190184049,
"acc_stderr": 0.03462419931615624,
"acc_norm": 0.7361963190184049,
"acc_norm_stderr": 0.03462419931615624
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.4732142857142857,
"acc_stderr": 0.047389751192741546,
"acc_norm": 0.4732142857142857,
"acc_norm_stderr": 0.047389751192741546
},
"harness|hendrycksTest-management|5": {
"acc": 0.7961165048543689,
"acc_stderr": 0.039891398595317706,
"acc_norm": 0.7961165048543689,
"acc_norm_stderr": 0.039891398595317706
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8418803418803419,
"acc_stderr": 0.023902325549560396,
"acc_norm": 0.8418803418803419,
"acc_norm_stderr": 0.023902325549560396
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.71,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.71,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8250319284802043,
"acc_stderr": 0.013586619219903341,
"acc_norm": 0.8250319284802043,
"acc_norm_stderr": 0.013586619219903341
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.6907514450867052,
"acc_stderr": 0.02488314057007176,
"acc_norm": 0.6907514450867052,
"acc_norm_stderr": 0.02488314057007176
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.3027932960893855,
"acc_stderr": 0.015366860386397108,
"acc_norm": 0.3027932960893855,
"acc_norm_stderr": 0.015366860386397108
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.738562091503268,
"acc_stderr": 0.025160998214292452,
"acc_norm": 0.738562091503268,
"acc_norm_stderr": 0.025160998214292452
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.707395498392283,
"acc_stderr": 0.025839898334877983,
"acc_norm": 0.707395498392283,
"acc_norm_stderr": 0.025839898334877983
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7253086419753086,
"acc_stderr": 0.024836057868294677,
"acc_norm": 0.7253086419753086,
"acc_norm_stderr": 0.024836057868294677
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.46808510638297873,
"acc_stderr": 0.029766675075873866,
"acc_norm": 0.46808510638297873,
"acc_norm_stderr": 0.029766675075873866
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4439374185136897,
"acc_stderr": 0.012689708167787687,
"acc_norm": 0.4439374185136897,
"acc_norm_stderr": 0.012689708167787687
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6397058823529411,
"acc_stderr": 0.029163128570670733,
"acc_norm": 0.6397058823529411,
"acc_norm_stderr": 0.029163128570670733
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6650326797385621,
"acc_stderr": 0.019094228167000325,
"acc_norm": 0.6650326797385621,
"acc_norm_stderr": 0.019094228167000325
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6727272727272727,
"acc_stderr": 0.0449429086625209,
"acc_norm": 0.6727272727272727,
"acc_norm_stderr": 0.0449429086625209
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7387755102040816,
"acc_stderr": 0.028123429335142783,
"acc_norm": 0.7387755102040816,
"acc_norm_stderr": 0.028123429335142783
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.835820895522388,
"acc_stderr": 0.026193923544454125,
"acc_norm": 0.835820895522388,
"acc_norm_stderr": 0.026193923544454125
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.86,
"acc_stderr": 0.0348735088019777,
"acc_norm": 0.86,
"acc_norm_stderr": 0.0348735088019777
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5662650602409639,
"acc_stderr": 0.03858158940685515,
"acc_norm": 0.5662650602409639,
"acc_norm_stderr": 0.03858158940685515
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8011695906432749,
"acc_stderr": 0.030611116557432528,
"acc_norm": 0.8011695906432749,
"acc_norm_stderr": 0.030611116557432528
},
"harness|truthfulqa:mc|0": {
"mc1": 0.3818849449204406,
"mc1_stderr": 0.01700810193916349,
"mc2": 0.5491954242070916,
"mc2_stderr": 0.015160040858276722
},
"harness|winogrande|5": {
"acc": 0.7790055248618785,
"acc_stderr": 0.011661223637643416
},
"harness|gsm8k|5": {
"acc": 0.5921152388172858,
"acc_stderr": 0.013536742075643088
}
}
```
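
If you only need the headline numbers shown above, an alternative to loading the parquet files is fetching the raw results JSON linked in the "Latest results" section. A hedged sketch (the filename is taken from that link, and `hf_hub_download` from `huggingface_hub` is assumed to be available):

```python
import json

from huggingface_hub import hf_hub_download

# Sketch: download the raw results JSON for this run and print per-task accuracy.
# The filename comes from the "Latest results" link above; adjust it for newer runs.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/details_fzzhang__toten_gsm8k_merged_s",
    filename="results_2024-02-17T06-12-40.059000.json",
    repo_type="dataset",
)
with open(path) as f:
    data = json.load(f)

# Some harness versions wrap the per-task block in a top-level "results" key;
# fall back to the whole document if that key is absent.
per_task = data.get("results", data)
for task, metrics in per_task.items():
    print(task, metrics.get("acc"))
```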
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] | open-llm-leaderboard/details_fzzhang__toten_gsm8k_merged_s | [
"region:us"
] | 2024-02-17T06:14:58+00:00 | {"pretty_name": "Evaluation run of fzzhang/toten_gsm8k_merged_s", "dataset_summary": "Dataset automatically created during the evaluation run of model [fzzhang/toten_gsm8k_merged_s](https://huggingface.co/fzzhang/toten_gsm8k_merged_s) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_fzzhang__toten_gsm8k_merged_s\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-02-17T06:12:40.059000](https://huggingface.co/datasets/open-llm-leaderboard/details_fzzhang__toten_gsm8k_merged_s/blob/main/results_2024-02-17T06-12-40.059000.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6303593896054516,\n \"acc_stderr\": 0.032532314502917783,\n \"acc_norm\": 0.6323776185375855,\n \"acc_norm_stderr\": 0.033183029878923596,\n \"mc1\": 0.3818849449204406,\n \"mc1_stderr\": 0.01700810193916349,\n \"mc2\": 0.5491954242070916,\n \"mc2_stderr\": 0.015160040858276722\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6100682593856656,\n \"acc_stderr\": 0.014252959848892896,\n \"acc_norm\": 0.6527303754266212,\n \"acc_norm_stderr\": 0.01391303452962045\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6602270464050985,\n \"acc_stderr\": 0.00472664053256204,\n \"acc_norm\": 0.847042421828321,\n \"acc_norm_stderr\": 0.0035921097436286175\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5703703703703704,\n \"acc_stderr\": 0.042763494943766,\n \"acc_norm\": 0.5703703703703704,\n \"acc_norm_stderr\": 0.042763494943766\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6842105263157895,\n \"acc_stderr\": 0.0378272898086547,\n \"acc_norm\": 0.6842105263157895,\n \"acc_norm_stderr\": 0.0378272898086547\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6754716981132075,\n \"acc_stderr\": 0.02881561571343211,\n \"acc_norm\": 0.6754716981132075,\n \"acc_norm_stderr\": 0.02881561571343211\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7430555555555556,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.7430555555555556,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 
0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720683,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720683\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6763005780346821,\n \"acc_stderr\": 0.0356760379963917,\n \"acc_norm\": 0.6763005780346821,\n \"acc_norm_stderr\": 0.0356760379963917\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4411764705882353,\n \"acc_stderr\": 0.049406356306056595,\n \"acc_norm\": 0.4411764705882353,\n \"acc_norm_stderr\": 0.049406356306056595\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5574468085106383,\n \"acc_stderr\": 0.03246956919789958,\n \"acc_norm\": 0.5574468085106383,\n \"acc_norm_stderr\": 0.03246956919789958\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5263157894736842,\n \"acc_stderr\": 0.046970851366478626,\n \"acc_norm\": 0.5263157894736842,\n \"acc_norm_stderr\": 0.046970851366478626\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5586206896551724,\n \"acc_stderr\": 0.04137931034482758,\n \"acc_norm\": 0.5586206896551724,\n \"acc_norm_stderr\": 0.04137931034482758\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41005291005291006,\n \"acc_stderr\": 0.025331202438944444,\n \"acc_norm\": 0.41005291005291006,\n \"acc_norm_stderr\": 0.025331202438944444\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.40476190476190477,\n \"acc_stderr\": 0.04390259265377562,\n \"acc_norm\": 0.40476190476190477,\n \"acc_norm_stderr\": 0.04390259265377562\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7548387096774194,\n \"acc_stderr\": 0.02447224384089552,\n \"acc_norm\": 0.7548387096774194,\n \"acc_norm_stderr\": 0.02447224384089552\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.47783251231527096,\n \"acc_stderr\": 0.03514528562175008,\n \"acc_norm\": 0.47783251231527096,\n \"acc_norm_stderr\": 0.03514528562175008\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.03192271569548301,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.03192271569548301\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7676767676767676,\n \"acc_stderr\": 0.030088629490217487,\n \"acc_norm\": 0.7676767676767676,\n \"acc_norm_stderr\": 0.030088629490217487\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8652849740932642,\n \"acc_stderr\": 0.02463978909770944,\n \"acc_norm\": 0.8652849740932642,\n \"acc_norm_stderr\": 0.02463978909770944\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6256410256410256,\n \"acc_stderr\": 0.0245375915728305,\n \"acc_norm\": 0.6256410256410256,\n \"acc_norm_stderr\": 0.0245375915728305\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34814814814814815,\n \"acc_stderr\": 0.029045600290616255,\n \"acc_norm\": 0.34814814814814815,\n \"acc_norm_stderr\": 0.029045600290616255\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6512605042016807,\n \"acc_stderr\": 0.030956636328566548,\n \"acc_norm\": 0.6512605042016807,\n \"acc_norm_stderr\": 0.030956636328566548\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.304635761589404,\n \"acc_stderr\": 0.03757949922943343,\n \"acc_norm\": 0.304635761589404,\n \"acc_norm_stderr\": 0.03757949922943343\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8238532110091743,\n \"acc_stderr\": 0.016332882393431388,\n \"acc_norm\": 0.8238532110091743,\n \"acc_norm_stderr\": 0.016332882393431388\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4722222222222222,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\": 0.4722222222222222,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7941176470588235,\n \"acc_stderr\": 0.028379449451588667,\n \"acc_norm\": 0.7941176470588235,\n \"acc_norm_stderr\": 0.028379449451588667\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7890295358649789,\n \"acc_stderr\": 0.026558372502661916,\n \"acc_norm\": 0.7890295358649789,\n \"acc_norm_stderr\": 0.026558372502661916\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6502242152466368,\n \"acc_stderr\": 0.03200736719484503,\n \"acc_norm\": 0.6502242152466368,\n \"acc_norm_stderr\": 0.03200736719484503\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7251908396946565,\n \"acc_stderr\": 0.039153454088478354,\n \"acc_norm\": 0.7251908396946565,\n \"acc_norm_stderr\": 0.039153454088478354\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228733,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228733\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7592592592592593,\n \"acc_stderr\": 0.04133119440243838,\n \"acc_norm\": 0.7592592592592593,\n \"acc_norm_stderr\": 0.04133119440243838\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7361963190184049,\n \"acc_stderr\": 0.03462419931615624,\n \"acc_norm\": 0.7361963190184049,\n \"acc_norm_stderr\": 0.03462419931615624\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7961165048543689,\n \"acc_stderr\": 0.039891398595317706,\n \"acc_norm\": 0.7961165048543689,\n \"acc_norm_stderr\": 0.039891398595317706\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8418803418803419,\n \"acc_stderr\": 0.023902325549560396,\n \"acc_norm\": 0.8418803418803419,\n \"acc_norm_stderr\": 0.023902325549560396\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8250319284802043,\n \"acc_stderr\": 0.013586619219903341,\n \"acc_norm\": 0.8250319284802043,\n \"acc_norm_stderr\": 0.013586619219903341\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6907514450867052,\n \"acc_stderr\": 0.02488314057007176,\n \"acc_norm\": 0.6907514450867052,\n \"acc_norm_stderr\": 0.02488314057007176\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3027932960893855,\n \"acc_stderr\": 0.015366860386397108,\n \"acc_norm\": 0.3027932960893855,\n \"acc_norm_stderr\": 0.015366860386397108\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.738562091503268,\n \"acc_stderr\": 0.025160998214292452,\n \"acc_norm\": 0.738562091503268,\n \"acc_norm_stderr\": 0.025160998214292452\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.707395498392283,\n \"acc_stderr\": 0.025839898334877983,\n \"acc_norm\": 0.707395498392283,\n \"acc_norm_stderr\": 0.025839898334877983\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7253086419753086,\n \"acc_stderr\": 0.024836057868294677,\n \"acc_norm\": 0.7253086419753086,\n \"acc_norm_stderr\": 0.024836057868294677\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.46808510638297873,\n \"acc_stderr\": 0.029766675075873866,\n \"acc_norm\": 0.46808510638297873,\n \"acc_norm_stderr\": 0.029766675075873866\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4439374185136897,\n \"acc_stderr\": 0.012689708167787687,\n \"acc_norm\": 0.4439374185136897,\n \"acc_norm_stderr\": 0.012689708167787687\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6397058823529411,\n \"acc_stderr\": 0.029163128570670733,\n \"acc_norm\": 0.6397058823529411,\n \"acc_norm_stderr\": 0.029163128570670733\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6650326797385621,\n \"acc_stderr\": 0.019094228167000325,\n \"acc_norm\": 0.6650326797385621,\n \"acc_norm_stderr\": 0.019094228167000325\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7387755102040816,\n \"acc_stderr\": 0.028123429335142783,\n \"acc_norm\": 0.7387755102040816,\n \"acc_norm_stderr\": 0.028123429335142783\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454125,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454125\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.0348735088019777,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.0348735088019777\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5662650602409639,\n \"acc_stderr\": 0.03858158940685515,\n \"acc_norm\": 0.5662650602409639,\n \"acc_norm_stderr\": 0.03858158940685515\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8011695906432749,\n \"acc_stderr\": 0.030611116557432528,\n \"acc_norm\": 0.8011695906432749,\n \"acc_norm_stderr\": 0.030611116557432528\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3818849449204406,\n \"mc1_stderr\": 0.01700810193916349,\n \"mc2\": 0.5491954242070916,\n \"mc2_stderr\": 0.015160040858276722\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7790055248618785,\n \"acc_stderr\": 0.011661223637643416\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5921152388172858,\n \"acc_stderr\": 0.013536742075643088\n 
}\n}\n```", "repo_url": "https://huggingface.co/fzzhang/toten_gsm8k_merged_s", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "clementine@hf.co", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|arc:challenge|25_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|gsm8k|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hellaswag|10_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T06-12-40.059000.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T06-12-40.059000.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-management|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-02-17T06-12-40.059000.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-virology|5_2024-02-17T06-12-40.059000.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T06-12-40.059000.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_02_17T06_12_40.059000", "path": ["**/details_harness|winogrande|5_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-02-17T06-12-40.059000.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_02_17T06_12_40.059000", "path": ["results_2024-02-17T06-12-40.059000.parquet"]}, {"split": "latest", "path": ["results_2024-02-17T06-12-40.059000.parquet"]}]}]} | 2024-02-17T06:15:21+00:00 |
61eca7afa19fb6b59805da1db5f8943f5d64eec3 | benayas/massive_augmented_10pct_v0 | [
"region:us"
] | 2024-02-17T06:18:40+00:00 | {"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "locale", "dtype": "string"}, {"name": "partition", "dtype": "string"}, {"name": "scenario", "dtype": "float64"}, {"name": "intent", "dtype": "float64"}, {"name": "utt", "dtype": "string"}, {"name": "annot_utt", "dtype": "string"}, {"name": "worker_id", "dtype": "string"}, {"name": "slot_method", "struct": [{"name": "method", "sequence": "null"}, {"name": "slot", "sequence": "null"}]}, {"name": "judgments", "struct": [{"name": "grammar_score", "sequence": "int8"}, {"name": "intent_score", "sequence": "int8"}, {"name": "language_identification", "sequence": "null"}, {"name": "slots_score", "sequence": "int8"}, {"name": "spelling_score", "sequence": "int8"}, {"name": "worker_id", "sequence": "null"}]}, {"name": "category", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "__index_level_0__", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 1703765, "num_examples": 11514}], "download_size": 436326, "dataset_size": 1703765}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T06:18:42+00:00 |
|
a6435063408b8b0c4f4dd9b25e34b0c9bf943f5d | Atipico1/nq-test-adv-replace-v2 | [
"region:us"
] | 2024-02-17T06:26:59+00:00 | {"dataset_info": {"features": [{"name": "question", "dtype": "string"}, {"name": "entity", "dtype": "string"}, {"name": "similar_entity", "dtype": "string"}, {"name": "answers", "sequence": "string"}, {"name": "ctxs", "list": [{"name": "hasanswer", "dtype": "bool"}, {"name": "score", "dtype": "float64"}, {"name": "text", "dtype": "string"}, {"name": "title", "dtype": "string"}]}, {"name": "masked_query", "dtype": "string"}, {"name": "original_case", "list": [{"name": "answer", "dtype": "string"}, {"name": "context", "dtype": "string"}, {"name": "distance", "dtype": "string"}, {"name": "original_answers", "sequence": "string"}, {"name": "question", "dtype": "string"}]}, {"name": "unans_case", "list": [{"name": "answer", "dtype": "string"}, {"name": "answers", "sequence": "string"}, {"name": "context", "dtype": "string"}, {"name": "distance", "dtype": "string"}, {"name": "original_answers", "sequence": "string"}, {"name": "question", "dtype": "string"}]}, {"name": "conflict_case", "list": [{"name": "answer", "dtype": "string"}, {"name": "conflict_context", "dtype": "string"}, {"name": "context", "dtype": "string"}, {"name": "distance", "dtype": "string"}, {"name": "original_answers", "sequence": "string"}, {"name": "question", "dtype": "string"}]}, {"name": "context", "dtype": "string"}, {"name": "context_vague", "dtype": "string"}, {"name": "entities", "dtype": "string"}, {"name": "entities_count", "dtype": "int64"}, {"name": "adv_sent", "dtype": "string"}, {"name": "adv_passage", "dtype": "string"}, {"name": "hasanswer", "dtype": "bool"}, {"name": "is_adversarial", "dtype": "bool"}], "splits": [{"name": "test", "num_bytes": 57839023, "num_examples": 3610}], "download_size": 33744383, "dataset_size": 57839023}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "data/test-*"}]}]} | 2024-02-17T06:27:05+00:00 |
|
8487e02c96b1b855bd323c5ba152178686ac6667 | mukilanvk/yolo1 | [
"region:us"
] | 2024-02-17T06:42:33+00:00 | {} | 2024-02-17T06:59:21+00:00 |
|
c5e35498bc221d674bb27ce758076e9715c61668 | jan-hq/finqa_bench_stealth-finance-v3 | [
"region:us"
] | 2024-02-17T06:43:48+00:00 | {"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "query", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "response", "dtype": "string"}, {"name": "options", "struct": [{"name": "A", "dtype": "string"}, {"name": "B", "dtype": "string"}, {"name": "C", "dtype": "string"}, {"name": "D", "dtype": "string"}]}, {"name": "golden_key", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 25042046, "num_examples": 5074}], "download_size": 11118711, "dataset_size": 25042046}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]} | 2024-02-17T06:43:52+00:00 |