| datasetId (string, 5-121 chars) | author (string, 2-42 chars) | last_modified | downloads (int64, 0-34.4M) | likes (int64, 0-5.77k) | tags (sequence, 1-7.92k items) | task_categories (sequence, 0-40 items, nullable) | createdAt | card (string, 19-977k chars) |
|---|---|---|---|---|---|---|---|---|
upvantage/claude-context-35k | upvantage | "2024-09-29T14:51:03Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T14:50:48Z" | ---
dataset_info:
features:
- name: instruction
dtype: string
- name: input
dtype: string
- name: output
dtype: string
splits:
- name: train
num_bytes: 57969772
num_examples: 39117
download_size: 29619495
dataset_size: 57969772
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
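A minimal loading sketch for the schema declared above; note that joining `instruction` and `input` into one prompt is a common SFT convention assumed here for illustration, not something the card specifies:

```python
from datasets import load_dataset

# Load the single declared config/split.
ds = load_dataset("upvantage/claude-context-35k", split="train")

example = ds[0]
# instruction/input/output are plain string columns; concatenating
# instruction and input into one prompt is an assumption, not card metadata.
prompt = f"{example['instruction']}\n\n{example['input']}".strip()
print(prompt[:200])
print("->", example["output"][:200])
```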
|
mteb/webis-touche2020-v3 | mteb | "2024-09-29T14:58:03Z" | 0 | 0 | [
"size_categories:100K<n<1M",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T14:56:57Z" | ---
dataset_info:
- config_name: corpus
features:
- name: _id
dtype: string
- name: title
dtype: string
- name: text
dtype: string
splits:
- name: corpus
num_bytes: 653065504
num_examples: 303732
download_size: 356788575
dataset_size: 653065504
- config_name: default
features:
- name: query-id
dtype: string
- name: corpus-id
dtype: string
- name: score
dtype: float64
splits:
- name: test
num_bytes: 161729
num_examples: 2849
download_size: 52071
dataset_size: 161729
- config_name: queries
features:
- name: _id
dtype: string
- name: text
dtype: string
- name: metadata
struct:
- name: description
dtype: string
- name: narrative
dtype: string
splits:
- name: train
num_bytes: 25863
num_examples: 49
download_size: 20176
dataset_size: 25863
configs:
- config_name: corpus
data_files:
- split: corpus
path: corpus/corpus-*
- config_name: default
data_files:
- split: test
path: data/test-*
- config_name: queries
data_files:
- split: train
path: queries/train-*
---
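The three configs above follow the usual retrieval layout (documents, relevance judgments, queries); a minimal sketch of loading them, using only the config and split names declared in the YAML:

```python
from datasets import load_dataset

# Document corpus, test relevance judgments (qrels), and queries.
corpus = load_dataset("mteb/webis-touche2020-v3", "corpus", split="corpus")
qrels = load_dataset("mteb/webis-touche2020-v3", "default", split="test")
queries = load_dataset("mteb/webis-touche2020-v3", "queries", split="train")

# Each qrels row links a query to a corpus document with a relevance score.
print(qrels[0])  # {'query-id': ..., 'corpus-id': ..., 'score': ...}
```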
|
Noveled/kr_ink_painting_06 | Noveled | "2024-09-29T15:00:20Z" | 0 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T14:58:08Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 2646136278.0
num_examples: 477
download_size: 2497424646
dataset_size: 2646136278.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Eka-Korn/distillbert-qa-russian | Eka-Korn | "2024-09-29T14:58:59Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:tabular",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T14:58:42Z" | ---
dataset_info:
features:
- name: input_ids
sequence: int32
- name: attention_mask
sequence: int8
- name: start_positions
dtype: int64
- name: end_positions
dtype: int64
- name: sequence_ids
sequence: int64
splits:
- name: train
num_bytes: 388306912.0
num_examples: 57544
- name: test
num_bytes: 97083476.0
num_examples: 14387
download_size: 46592442
dataset_size: 485390388.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
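Since the default config above declares both splits, loading without a split argument returns them together; a minimal sketch:

```python
from datasets import load_dataset

# Returns a DatasetDict with the 'train' and 'test' splits declared above.
ds = load_dataset("Eka-Korn/distillbert-qa-russian")
print(ds)
# Pre-tokenized QA fields: input_ids, attention_mask,
# start_positions, end_positions, sequence_ids.
print(ds["train"].features)
```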
|
Noveled/kr_ink_painting_07 | Noveled | "2024-09-29T15:03:17Z" | 0 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T15:00:56Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 2675062668.0
num_examples: 498
download_size: 2512727274
dataset_size: 2675062668.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DLI-Lab/prelim_analysis_description_with_refined_tao_annotation | DLI-Lab | "2024-09-29T15:11:39Z" | 0 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T15:02:58Z" | ---
dataset_info:
features:
- name: task_id
dtype: string
- name: step_idx
dtype: int64
- name: cur_observation
dtype: string
- name: prev_observation
dtype: string
- name: next_observation
dtype: string
- name: prev_actions
sequence: string
- name: gold_action
dtype: string
- name: objective
dtype: string
- name: url
dtype: string
- name: next_state_tao
dtype: string
- name: new_items
dtype: string
- name: updated_items
dtype: string
- name: deleted_items
dtype: string
- name: value_score
dtype: float64
- name: refined_tao
dtype: string
- name: raw_prediction
dtype: string
- name: rationale
dtype: string
- name: next_state_description_with_tao
dtype: string
- name: raw_neg_actions
dtype: string
- name: neg_actions
sequence: string
- name: neg_actions_w_rationale
sequence: string
- name: neg_next_state_simple
sequence: string
- name: gt_negative_observations
struct:
- name: neg_observation0
dtype: string
- name: neg_observation1
dtype: string
- name: neg_observation2
dtype: string
- name: neg_observation3
dtype: string
- name: neg_observation4
dtype: string
- name: neg_observation5
dtype: string
- name: neg_observation6
dtype: string
- name: neg_observation7
dtype: string
- name: neg_observation8
dtype: string
- name: negative_actions
struct:
- name: neg_action0
dtype: string
- name: neg_action1
dtype: string
- name: neg_action2
dtype: string
- name: neg_action3
dtype: string
- name: neg_action4
dtype: string
- name: neg_action5
dtype: string
- name: neg_action6
dtype: string
- name: neg_action7
dtype: string
- name: neg_action8
dtype: string
- name: neg_tao_list
sequence: string
- name: neg_update_dict_list
list:
- name: deleted_items
dtype: string
- name: new_items
dtype: string
- name: updated_items
dtype: string
- name: neg_refined_tao_list
sequence: string
- name: neg_descriptions_with_tao
sequence: string
splits:
- name: train
num_bytes: 89444429
num_examples: 102
download_size: 14029785
dataset_size: 89444429
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
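The schema above mixes flat columns with struct and list features; when loaded, structs come back as plain Python dicts and lists of structs as lists of dicts. A minimal sketch:

```python
from datasets import load_dataset

ds = load_dataset(
    "DLI-Lab/prelim_analysis_description_with_refined_tao_annotation",
    split="train",
)
row = ds[0]
print(type(row["gt_negative_observations"]))  # struct -> dict (neg_observation0..8)
print(type(row["neg_update_dict_list"]))      # list of structs -> list of dicts
print(row["prev_actions"][:3])                # sequence of strings -> list of strings
```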
|
lokeshreddymtpl/Reformat_dataset-llama2-1k | lokeshreddymtpl | "2024-09-29T15:03:27Z" | 0 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T15:03:26Z" | ---
dataset_info:
features:
- name: text
dtype: string
splits:
- name: train
num_bytes: 2929
num_examples: 29
download_size: 2034
dataset_size: 2929
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Noveled/kr_ink_painting_09 | Noveled | "2024-09-29T15:07:49Z" | 0 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T15:03:30Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 5140249290.85
num_examples: 1075
download_size: 4698505139
dataset_size: 5140249290.85
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
JaineLi/VL-MIA | JaineLi | "2024-09-29T16:02:14Z" | 0 | 0 | [
"license:cc-by-4.0",
"modality:image",
"region:us",
"MIA",
" LLM",
" VLLM"
] | null | "2024-09-29T15:11:13Z" | ---
license: cc-by-4.0
tags:
- MIA
- ' LLM'
- ' VLLM'
---
#### VL-MIA
VL-MIA is designed for membership inference attacks (MIA) on VLLMs:
![image.png](https://cdn-uploads.huggingface.co/production/uploads/6515968a60757b8c8f686b6a/fga5psjQnLyfE6_Kzz2Kc.png)
**Label 0** refers to unseen non-member data; **Label 1** refers to member data.
#### Finetune datasets
VL-MIA includes two finetuning datasets: the Geometry dataset and the Password dataset. Each image in the Geometry dataset is a random 4x4 arrangement of geometric shapes, and each image in the Password dataset is a random 6x6 arrangement of characters and digits drawn from EMNIST [1] and MNIST. The associated text describes the image's content (e.g., characters, colors, shapes). We select half of each dataset as the member set, used to finetune the VLLM, while the remaining half serves as the non-member set.
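A minimal sketch of how the Label 0/1 scheme supports evaluating an attack: given a per-sample membership score (the scoring function, e.g. negative loss under the target VLLM, is an assumption here and not part of the dataset), an attack should rank members above non-members, which can be summarized as an AUC:

```python
from sklearn.metrics import roc_auc_score

labels = [0, 0, 1, 1]                     # 0 = non-member, 1 = member, as defined above
attack_scores = [0.10, 0.40, 0.35, 0.80]  # hypothetical per-sample attack scores
print(roc_auc_score(labels, attack_scores))  # attack AUC; 0.5 = chance level
```
|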
WebWizard/data_mix_v0.8_stage1 | WebWizard | "2024-09-29T16:49:29Z" | 0 | 0 | [
"size_categories:100K<n<1M",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T15:12:44Z" | ---
dataset_info:
features:
- name: id
dtype: string
- name: image
dtype: image
- name: conversations
dtype: string
splits:
- name: caption
num_bytes: 130555482142.75
num_examples: 238395
download_size: 125871427732
dataset_size: 130555482142.75
configs:
- config_name: default
data_files:
- split: caption
path: data/caption-*
---
|
open-llm-leaderboard/yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table-details | open-llm-leaderboard | "2024-09-29T15:16:49Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T15:13:00Z" | ---
pretty_name: Evaluation run of yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table](https://huggingface.co/yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table-details\"\
,\n\tname=\"yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-09-29T15-12-59.528765](https://huggingface.co/datasets/open-llm-leaderboard/yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table-details/blob/main/yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table/results_2024-09-29T15-12-59.528765.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"exact_match,none\": 0.08836858006042296,\n \"exact_match_stderr,none\"\
: 0.007640587008385335,\n \"acc,none\": 0.37175864361702127,\n \
\ \"acc_stderr,none\": 0.004405984196843539,\n \"inst_level_loose_acc,none\"\
: 0.8105515587529976,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\
,\n \"inst_level_strict_acc,none\": 0.7266187050359713,\n \
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"acc_norm,none\"\
: 0.4436373070437151,\n \"acc_norm_stderr,none\": 0.005348605915705943,\n\
\ \"prompt_level_strict_acc,none\": 0.6303142329020333,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.020772943616332303,\n \"\
prompt_level_loose_acc,none\": 0.7319778188539742,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.01906063869163028,\n \"alias\": \"leaderboard\"\n },\n \
\ \"leaderboard_bbh\": {\n \"acc_norm,none\": 0.4922756465891338,\n\
\ \"acc_norm_stderr,none\": 0.006267767960027899,\n \"alias\"\
: \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"acc_norm,none\": 0.808,\n \"acc_norm_stderr,none\"\
: 0.024960691989172005,\n \"alias\": \" - leaderboard_bbh_boolean_expressions\"\
\n },\n \"leaderboard_bbh_causal_judgement\": {\n \"acc_norm,none\"\
: 0.6042780748663101,\n \"acc_norm_stderr,none\": 0.03585560071592546,\n\
\ \"alias\": \" - leaderboard_bbh_causal_judgement\"\n },\n \
\ \"leaderboard_bbh_date_understanding\": {\n \"acc_norm,none\"\
: 0.456,\n \"acc_norm_stderr,none\": 0.0315632850612134,\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\"\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"acc_norm,none\": 0.568,\n\
\ \"acc_norm_stderr,none\": 0.031391810765429407,\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\"\n },\n \"leaderboard_bbh_formal_fallacies\"\
: {\n \"acc_norm,none\": 0.544,\n \"acc_norm_stderr,none\"\
: 0.03156328506121339,\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
\n },\n \"leaderboard_bbh_geometric_shapes\": {\n \"acc_norm,none\"\
: 0.316,\n \"acc_norm_stderr,none\": 0.029462657598578683,\n \
\ \"alias\": \" - leaderboard_bbh_geometric_shapes\"\n },\n \"\
leaderboard_bbh_hyperbaton\": {\n \"acc_norm,none\": 0.564,\n \
\ \"acc_norm_stderr,none\": 0.03142556706028128,\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\"\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"acc_norm,none\": 0.368,\n \"acc_norm_stderr,none\"\
: 0.030562070620993163,\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \
\ \"acc_norm,none\": 0.476,\n \"acc_norm_stderr,none\": 0.03164968895968782,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"acc_norm,none\": 0.532,\n \"acc_norm_stderr,none\": 0.031621252575725504,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
acc_norm,none\": 0.508,\n \"acc_norm_stderr,none\": 0.0316821564314138,\n\
\ \"alias\": \" - leaderboard_bbh_movie_recommendation\"\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"acc_norm,none\": 0.532,\n\
\ \"acc_norm_stderr,none\": 0.031621252575725504,\n \"alias\"\
: \" - leaderboard_bbh_navigate\"\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"acc_norm,none\": 0.38,\n \"acc_norm_stderr,none\"\
: 0.030760116042626042,\n \"alias\": \" - leaderboard_bbh_object_counting\"\
\n },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
acc_norm,none\": 0.541095890410959,\n \"acc_norm_stderr,none\": 0.041382249050673066,\n\
\ \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\n },\n\
\ \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \"\
acc_norm,none\": 0.632,\n \"acc_norm_stderr,none\": 0.030562070620993163,\n\
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"acc_norm,none\"\
: 0.696,\n \"acc_norm_stderr,none\": 0.029150213374159677,\n \
\ \"alias\": \" - leaderboard_bbh_ruin_names\"\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"acc_norm,none\": 0.468,\n \"acc_norm_stderr,none\"\
: 0.031621252575725504,\n \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\
\n },\n \"leaderboard_bbh_snarks\": {\n \"acc_norm,none\"\
: 0.47752808988764045,\n \"acc_norm_stderr,none\": 0.03754432508487193,\n\
\ \"alias\": \" - leaderboard_bbh_snarks\"\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"acc_norm,none\": 0.804,\n \"acc_norm_stderr,none\"\
: 0.02515685731325593,\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
\n },\n \"leaderboard_bbh_temporal_sequences\": {\n \"\
acc_norm,none\": 0.336,\n \"acc_norm_stderr,none\": 0.029933259094191516,\n\
\ \"alias\": \" - leaderboard_bbh_temporal_sequences\"\n },\n\
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"acc_norm,none\": 0.188,\n \"acc_norm_stderr,none\": 0.02476037772775051,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"acc_norm,none\": 0.192,\n \"acc_norm_stderr,none\"\
: 0.02496069198917202,\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"acc_norm,none\": 0.384,\n \"acc_norm_stderr,none\"\
: 0.030821679117375378,\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
\n },\n \"leaderboard_bbh_web_of_lies\": {\n \"acc_norm,none\"\
: 0.484,\n \"acc_norm_stderr,none\": 0.03166998503010742,\n \
\ \"alias\": \" - leaderboard_bbh_web_of_lies\"\n },\n \"leaderboard_gpqa\"\
: {\n \"acc_norm,none\": 0.25922818791946306,\n \"acc_norm_stderr,none\"\
: 0.01270453266853576,\n \"alias\": \" - leaderboard_gpqa\"\n \
\ },\n \"leaderboard_gpqa_diamond\": {\n \"acc_norm,none\": 0.26262626262626265,\n\
\ \"acc_norm_stderr,none\": 0.031353050095330855,\n \"alias\"\
: \" - leaderboard_gpqa_diamond\"\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"acc_norm,none\": 0.2692307692307692,\n \"acc_norm_stderr,none\"\
: 0.019000027142915377,\n \"alias\": \" - leaderboard_gpqa_extended\"\
\n },\n \"leaderboard_gpqa_main\": {\n \"acc_norm,none\"\
: 0.24553571428571427,\n \"acc_norm_stderr,none\": 0.020357428454484575,\n\
\ \"alias\": \" - leaderboard_gpqa_main\"\n },\n \"leaderboard_ifeval\"\
: {\n \"prompt_level_strict_acc,none\": 0.6303142329020333,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.020772943616332303,\n \
\ \"inst_level_strict_acc,none\": 0.7266187050359713,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.7319778188539742,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.019060638691630282,\n \
\ \"inst_level_loose_acc,none\": 0.8105515587529976,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"alias\": \" - leaderboard_ifeval\"\n },\n \
\ \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.08836858006042296,\n\
\ \"exact_match_stderr,none\": 0.007640587008385335,\n \"\
alias\": \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\"\
: {\n \"exact_match,none\": 0.12052117263843648,\n \"exact_match_stderr,none\"\
: 0.018611597654928206,\n \"alias\": \" - leaderboard_math_algebra_hard\"\
\n },\n \"leaderboard_math_counting_and_prob_hard\": {\n \
\ \"exact_match,none\": 0.032520325203252036,\n \"exact_match_stderr,none\"\
: 0.01605899820587974,\n \"alias\": \" - leaderboard_math_counting_and_prob_hard\"\
\n },\n \"leaderboard_math_geometry_hard\": {\n \"exact_match,none\"\
: 0.045454545454545456,\n \"exact_match_stderr,none\": 0.018199158975632696,\n\
\ \"alias\": \" - leaderboard_math_geometry_hard\"\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"exact_match,none\"\
: 0.02857142857142857,\n \"exact_match_stderr,none\": 0.009973998820736026,\n\
\ \"alias\": \" - leaderboard_math_intermediate_algebra_hard\"\n \
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"exact_match,none\"\
: 0.09740259740259741,\n \"exact_match_stderr,none\": 0.023971024368870247,\n\
\ \"alias\": \" - leaderboard_math_num_theory_hard\"\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"exact_match,none\"\
: 0.20725388601036268,\n \"exact_match_stderr,none\": 0.02925282329180363,\n\
\ \"alias\": \" - leaderboard_math_prealgebra_hard\"\n },\n \
\ \"leaderboard_math_precalculus_hard\": {\n \"exact_match,none\"\
: 0.05185185185185185,\n \"exact_match_stderr,none\": 0.019154368449050514,\n\
\ \"alias\": \" - leaderboard_math_precalculus_hard\"\n },\n\
\ \"leaderboard_mmlu_pro\": {\n \"acc,none\": 0.37175864361702127,\n\
\ \"acc_stderr,none\": 0.004405984196843539,\n \"alias\":\
\ \" - leaderboard_mmlu_pro\"\n },\n \"leaderboard_musr\": {\n \
\ \"acc_norm,none\": 0.3637566137566138,\n \"acc_norm_stderr,none\"\
: 0.017089741838981088,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"acc_norm,none\"\
: 0.508,\n \"acc_norm_stderr,none\": 0.031682156431413803,\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\"\n },\n \"\
leaderboard_musr_object_placements\": {\n \"acc_norm,none\": 0.25,\n\
\ \"acc_norm_stderr,none\": 0.02711630722733202,\n \"alias\"\
: \" - leaderboard_musr_object_placements\"\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"acc_norm,none\": 0.336,\n \"acc_norm_stderr,none\"\
: 0.029933259094191516,\n \"alias\": \" - leaderboard_musr_team_allocation\"\
\n }\n },\n \"leaderboard\": {\n \"exact_match,none\": 0.08836858006042296,\n\
\ \"exact_match_stderr,none\": 0.007640587008385335,\n \"acc,none\"\
: 0.37175864361702127,\n \"acc_stderr,none\": 0.004405984196843539,\n \
\ \"inst_level_loose_acc,none\": 0.8105515587529976,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"inst_level_strict_acc,none\": 0.7266187050359713,\n \
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"acc_norm,none\": 0.4436373070437151,\n\
\ \"acc_norm_stderr,none\": 0.005348605915705943,\n \"prompt_level_strict_acc,none\"\
: 0.6303142329020333,\n \"prompt_level_strict_acc_stderr,none\": 0.020772943616332303,\n\
\ \"prompt_level_loose_acc,none\": 0.7319778188539742,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.01906063869163028,\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.4922756465891338,\n \"acc_norm_stderr,none\"\
: 0.006267767960027899,\n \"alias\": \" - leaderboard_bbh\"\n },\n \
\ \"leaderboard_bbh_boolean_expressions\": {\n \"acc_norm,none\": 0.808,\n\
\ \"acc_norm_stderr,none\": 0.024960691989172005,\n \"alias\": \"\
\ - leaderboard_bbh_boolean_expressions\"\n },\n \"leaderboard_bbh_causal_judgement\"\
: {\n \"acc_norm,none\": 0.6042780748663101,\n \"acc_norm_stderr,none\"\
: 0.03585560071592546,\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
\n },\n \"leaderboard_bbh_date_understanding\": {\n \"acc_norm,none\"\
: 0.456,\n \"acc_norm_stderr,none\": 0.0315632850612134,\n \"alias\"\
: \" - leaderboard_bbh_date_understanding\"\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"acc_norm,none\": 0.568,\n \"acc_norm_stderr,none\": 0.031391810765429407,\n\
\ \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\n },\n \"leaderboard_bbh_formal_fallacies\"\
: {\n \"acc_norm,none\": 0.544,\n \"acc_norm_stderr,none\": 0.03156328506121339,\n\
\ \"alias\": \" - leaderboard_bbh_formal_fallacies\"\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"acc_norm,none\": 0.316,\n \"acc_norm_stderr,none\": 0.029462657598578683,\n\
\ \"alias\": \" - leaderboard_bbh_geometric_shapes\"\n },\n \"leaderboard_bbh_hyperbaton\"\
: {\n \"acc_norm,none\": 0.564,\n \"acc_norm_stderr,none\": 0.03142556706028128,\n\
\ \"alias\": \" - leaderboard_bbh_hyperbaton\"\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"acc_norm,none\": 0.368,\n \"acc_norm_stderr,none\": 0.030562070620993163,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\n \
\ },\n \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"acc_norm,none\"\
: 0.476,\n \"acc_norm_stderr,none\": 0.03164968895968782,\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_seven_objects\"\n },\n \"leaderboard_bbh_logical_deduction_three_objects\"\
: {\n \"acc_norm,none\": 0.532,\n \"acc_norm_stderr,none\": 0.031621252575725504,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\n \
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"acc_norm,none\"\
: 0.508,\n \"acc_norm_stderr,none\": 0.0316821564314138,\n \"alias\"\
: \" - leaderboard_bbh_movie_recommendation\"\n },\n \"leaderboard_bbh_navigate\"\
: {\n \"acc_norm,none\": 0.532,\n \"acc_norm_stderr,none\": 0.031621252575725504,\n\
\ \"alias\": \" - leaderboard_bbh_navigate\"\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"acc_norm,none\": 0.38,\n \"acc_norm_stderr,none\": 0.030760116042626042,\n\
\ \"alias\": \" - leaderboard_bbh_object_counting\"\n },\n \"leaderboard_bbh_penguins_in_a_table\"\
: {\n \"acc_norm,none\": 0.541095890410959,\n \"acc_norm_stderr,none\"\
: 0.041382249050673066,\n \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\
\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \"\
acc_norm,none\": 0.632,\n \"acc_norm_stderr,none\": 0.030562070620993163,\n\
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\n \
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"acc_norm,none\": 0.696,\n\
\ \"acc_norm_stderr,none\": 0.029150213374159677,\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\"\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"acc_norm,none\": 0.468,\n \"acc_norm_stderr,none\": 0.031621252575725504,\n\
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"acc_norm,none\": 0.47752808988764045,\n\
\ \"acc_norm_stderr,none\": 0.03754432508487193,\n \"alias\": \" \
\ - leaderboard_bbh_snarks\"\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"acc_norm,none\": 0.804,\n \"acc_norm_stderr,none\": 0.02515685731325593,\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\"\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"acc_norm,none\": 0.336,\n \
\ \"acc_norm_stderr,none\": 0.029933259094191516,\n \"alias\": \" -\
\ leaderboard_bbh_temporal_sequences\"\n },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
: {\n \"acc_norm,none\": 0.188,\n \"acc_norm_stderr,none\": 0.02476037772775051,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"acc_norm,none\": 0.192,\n \"acc_norm_stderr,none\": 0.02496069198917202,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"acc_norm,none\": 0.384,\n \"acc_norm_stderr,none\": 0.030821679117375378,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
\n },\n \"leaderboard_bbh_web_of_lies\": {\n \"acc_norm,none\": 0.484,\n\
\ \"acc_norm_stderr,none\": 0.03166998503010742,\n \"alias\": \" \
\ - leaderboard_bbh_web_of_lies\"\n },\n \"leaderboard_gpqa\": {\n \
\ \"acc_norm,none\": 0.25922818791946306,\n \"acc_norm_stderr,none\": 0.01270453266853576,\n\
\ \"alias\": \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\"\
: {\n \"acc_norm,none\": 0.26262626262626265,\n \"acc_norm_stderr,none\"\
: 0.031353050095330855,\n \"alias\": \" - leaderboard_gpqa_diamond\"\n \
\ },\n \"leaderboard_gpqa_extended\": {\n \"acc_norm,none\": 0.2692307692307692,\n\
\ \"acc_norm_stderr,none\": 0.019000027142915377,\n \"alias\": \"\
\ - leaderboard_gpqa_extended\"\n },\n \"leaderboard_gpqa_main\": {\n \
\ \"acc_norm,none\": 0.24553571428571427,\n \"acc_norm_stderr,none\"\
: 0.020357428454484575,\n \"alias\": \" - leaderboard_gpqa_main\"\n },\n\
\ \"leaderboard_ifeval\": {\n \"prompt_level_strict_acc,none\": 0.6303142329020333,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.020772943616332303,\n \
\ \"inst_level_strict_acc,none\": 0.7266187050359713,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.7319778188539742,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.019060638691630282,\n \"inst_level_loose_acc,none\"\
: 0.8105515587529976,\n \"inst_level_loose_acc_stderr,none\": \"N/A\",\n\
\ \"alias\": \" - leaderboard_ifeval\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.08836858006042296,\n \"exact_match_stderr,none\"\
: 0.007640587008385335,\n \"alias\": \" - leaderboard_math_hard\"\n },\n\
\ \"leaderboard_math_algebra_hard\": {\n \"exact_match,none\": 0.12052117263843648,\n\
\ \"exact_match_stderr,none\": 0.018611597654928206,\n \"alias\":\
\ \" - leaderboard_math_algebra_hard\"\n },\n \"leaderboard_math_counting_and_prob_hard\"\
: {\n \"exact_match,none\": 0.032520325203252036,\n \"exact_match_stderr,none\"\
: 0.01605899820587974,\n \"alias\": \" - leaderboard_math_counting_and_prob_hard\"\
\n },\n \"leaderboard_math_geometry_hard\": {\n \"exact_match,none\"\
: 0.045454545454545456,\n \"exact_match_stderr,none\": 0.018199158975632696,\n\
\ \"alias\": \" - leaderboard_math_geometry_hard\"\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"exact_match,none\": 0.02857142857142857,\n \"exact_match_stderr,none\"\
: 0.009973998820736026,\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\"\
\n },\n \"leaderboard_math_num_theory_hard\": {\n \"exact_match,none\"\
: 0.09740259740259741,\n \"exact_match_stderr,none\": 0.023971024368870247,\n\
\ \"alias\": \" - leaderboard_math_num_theory_hard\"\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"exact_match,none\": 0.20725388601036268,\n \"exact_match_stderr,none\"\
: 0.02925282329180363,\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
\n },\n \"leaderboard_math_precalculus_hard\": {\n \"exact_match,none\"\
: 0.05185185185185185,\n \"exact_match_stderr,none\": 0.019154368449050514,\n\
\ \"alias\": \" - leaderboard_math_precalculus_hard\"\n },\n \"leaderboard_mmlu_pro\"\
: {\n \"acc,none\": 0.37175864361702127,\n \"acc_stderr,none\": 0.004405984196843539,\n\
\ \"alias\": \" - leaderboard_mmlu_pro\"\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.3637566137566138,\n \"acc_norm_stderr,none\"\
: 0.017089741838981088,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"acc_norm,none\": 0.508,\n\
\ \"acc_norm_stderr,none\": 0.031682156431413803,\n \"alias\": \"\
\ - leaderboard_musr_murder_mysteries\"\n },\n \"leaderboard_musr_object_placements\"\
: {\n \"acc_norm,none\": 0.25,\n \"acc_norm_stderr,none\": 0.02711630722733202,\n\
\ \"alias\": \" - leaderboard_musr_object_placements\"\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"acc_norm,none\": 0.336,\n \"acc_norm_stderr,none\": 0.029933259094191516,\n\
\ \"alias\": \" - leaderboard_musr_team_allocation\"\n }\n}\n```"
repo_url: https://huggingface.co/yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_date_understanding
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_navigate
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_navigate_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_object_counting
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_object_counting_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_ruin_names
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_snarks
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_snarks_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_gpqa_diamond
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_gpqa_diamond_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_gpqa_extended
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_gpqa_extended_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_gpqa_main
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_gpqa_main_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_ifeval
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_ifeval_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_math_algebra_hard
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_math_algebra_hard_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_math_geometry_hard
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_math_geometry_hard_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_math_num_theory_hard
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_math_precalculus_hard
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_mmlu_pro
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_mmlu_pro_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_musr_object_placements
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_musr_object_placements_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-09-29T15-12-59.528765.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_musr_team_allocation
data_files:
- split: 2024_09_29T15_12_59.528765
path:
- '**/samples_leaderboard_musr_team_allocation_2024-09-29T15-12-59.528765.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-09-29T15-12-59.528765.jsonl'
---
# Dataset Card for Evaluation run of yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of the model [yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table](https://huggingface.co/yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table).
The dataset is composed of 38 configurations, each corresponding to one of the evaluated tasks.
The dataset has been created from 1 run. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can, for instance, do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table-details",
name="yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
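The same pattern works for any of the 38 configs listed in the YAML above; for instance, a sketch loading the per-sample GPQA-main details (field names vary by task, so inspect them before relying on any):

```python
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table-details",
    name="yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table__leaderboard_gpqa_main",
    split="latest",
)
print(len(data))       # number of evaluated samples
print(data[0].keys())  # per-sample fields for this task
```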
## Latest results
These are the [latest results from run 2024-09-29T15-12-59.528765](https://huggingface.co/datasets/open-llm-leaderboard/yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table-details/blob/main/yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table/results_2024-09-29T15-12-59.528765.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each task's results in its timestamped and "latest" splits):
```python
{
"all": {
"leaderboard": {
"exact_match,none": 0.08836858006042296,
"exact_match_stderr,none": 0.007640587008385335,
"acc,none": 0.37175864361702127,
"acc_stderr,none": 0.004405984196843539,
"inst_level_loose_acc,none": 0.8105515587529976,
"inst_level_loose_acc_stderr,none": "N/A",
"inst_level_strict_acc,none": 0.7266187050359713,
"inst_level_strict_acc_stderr,none": "N/A",
"acc_norm,none": 0.4436373070437151,
"acc_norm_stderr,none": 0.005348605915705943,
"prompt_level_strict_acc,none": 0.6303142329020333,
"prompt_level_strict_acc_stderr,none": 0.020772943616332303,
"prompt_level_loose_acc,none": 0.7319778188539742,
"prompt_level_loose_acc_stderr,none": 0.01906063869163028,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.4922756465891338,
"acc_norm_stderr,none": 0.006267767960027899,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"acc_norm,none": 0.808,
"acc_norm_stderr,none": 0.024960691989172005,
"alias": " - leaderboard_bbh_boolean_expressions"
},
"leaderboard_bbh_causal_judgement": {
"acc_norm,none": 0.6042780748663101,
"acc_norm_stderr,none": 0.03585560071592546,
"alias": " - leaderboard_bbh_causal_judgement"
},
"leaderboard_bbh_date_understanding": {
"acc_norm,none": 0.456,
"acc_norm_stderr,none": 0.0315632850612134,
"alias": " - leaderboard_bbh_date_understanding"
},
"leaderboard_bbh_disambiguation_qa": {
"acc_norm,none": 0.568,
"acc_norm_stderr,none": 0.031391810765429407,
"alias": " - leaderboard_bbh_disambiguation_qa"
},
"leaderboard_bbh_formal_fallacies": {
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.03156328506121339,
"alias": " - leaderboard_bbh_formal_fallacies"
},
"leaderboard_bbh_geometric_shapes": {
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578683,
"alias": " - leaderboard_bbh_geometric_shapes"
},
"leaderboard_bbh_hyperbaton": {
"acc_norm,none": 0.564,
"acc_norm_stderr,none": 0.03142556706028128,
"alias": " - leaderboard_bbh_hyperbaton"
},
"leaderboard_bbh_logical_deduction_five_objects": {
"acc_norm,none": 0.368,
"acc_norm_stderr,none": 0.030562070620993163,
"alias": " - leaderboard_bbh_logical_deduction_five_objects"
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"acc_norm,none": 0.476,
"acc_norm_stderr,none": 0.03164968895968782,
"alias": " - leaderboard_bbh_logical_deduction_seven_objects"
},
"leaderboard_bbh_logical_deduction_three_objects": {
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725504,
"alias": " - leaderboard_bbh_logical_deduction_three_objects"
},
"leaderboard_bbh_movie_recommendation": {
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.0316821564314138,
"alias": " - leaderboard_bbh_movie_recommendation"
},
"leaderboard_bbh_navigate": {
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725504,
"alias": " - leaderboard_bbh_navigate"
},
"leaderboard_bbh_object_counting": {
"acc_norm,none": 0.38,
"acc_norm_stderr,none": 0.030760116042626042,
"alias": " - leaderboard_bbh_object_counting"
},
"leaderboard_bbh_penguins_in_a_table": {
"acc_norm,none": 0.541095890410959,
"acc_norm_stderr,none": 0.041382249050673066,
"alias": " - leaderboard_bbh_penguins_in_a_table"
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"acc_norm,none": 0.632,
"acc_norm_stderr,none": 0.030562070620993163,
"alias": " - leaderboard_bbh_reasoning_about_colored_objects"
},
"leaderboard_bbh_ruin_names": {
"acc_norm,none": 0.696,
"acc_norm_stderr,none": 0.029150213374159677,
"alias": " - leaderboard_bbh_ruin_names"
},
"leaderboard_bbh_salient_translation_error_detection": {
"acc_norm,none": 0.468,
"acc_norm_stderr,none": 0.031621252575725504,
"alias": " - leaderboard_bbh_salient_translation_error_detection"
},
"leaderboard_bbh_snarks": {
"acc_norm,none": 0.47752808988764045,
"acc_norm_stderr,none": 0.03754432508487193,
"alias": " - leaderboard_bbh_snarks"
},
"leaderboard_bbh_sports_understanding": {
"acc_norm,none": 0.804,
"acc_norm_stderr,none": 0.02515685731325593,
"alias": " - leaderboard_bbh_sports_understanding"
},
"leaderboard_bbh_temporal_sequences": {
"acc_norm,none": 0.336,
"acc_norm_stderr,none": 0.029933259094191516,
"alias": " - leaderboard_bbh_temporal_sequences"
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"acc_norm,none": 0.188,
"acc_norm_stderr,none": 0.02476037772775051,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"acc_norm,none": 0.192,
"acc_norm_stderr,none": 0.02496069198917202,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"acc_norm,none": 0.384,
"acc_norm_stderr,none": 0.030821679117375378,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects"
},
"leaderboard_bbh_web_of_lies": {
"acc_norm,none": 0.484,
"acc_norm_stderr,none": 0.03166998503010742,
"alias": " - leaderboard_bbh_web_of_lies"
},
"leaderboard_gpqa": {
"acc_norm,none": 0.25922818791946306,
"acc_norm_stderr,none": 0.01270453266853576,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"acc_norm,none": 0.26262626262626265,
"acc_norm_stderr,none": 0.031353050095330855,
"alias": " - leaderboard_gpqa_diamond"
},
"leaderboard_gpqa_extended": {
"acc_norm,none": 0.2692307692307692,
"acc_norm_stderr,none": 0.019000027142915377,
"alias": " - leaderboard_gpqa_extended"
},
"leaderboard_gpqa_main": {
"acc_norm,none": 0.24553571428571427,
"acc_norm_stderr,none": 0.020357428454484575,
"alias": " - leaderboard_gpqa_main"
},
"leaderboard_ifeval": {
"prompt_level_strict_acc,none": 0.6303142329020333,
"prompt_level_strict_acc_stderr,none": 0.020772943616332303,
"inst_level_strict_acc,none": 0.7266187050359713,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.7319778188539742,
"prompt_level_loose_acc_stderr,none": 0.019060638691630282,
"inst_level_loose_acc,none": 0.8105515587529976,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": " - leaderboard_ifeval"
},
"leaderboard_math_hard": {
"exact_match,none": 0.08836858006042296,
"exact_match_stderr,none": 0.007640587008385335,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"exact_match,none": 0.12052117263843648,
"exact_match_stderr,none": 0.018611597654928206,
"alias": " - leaderboard_math_algebra_hard"
},
"leaderboard_math_counting_and_prob_hard": {
"exact_match,none": 0.032520325203252036,
"exact_match_stderr,none": 0.01605899820587974,
"alias": " - leaderboard_math_counting_and_prob_hard"
},
"leaderboard_math_geometry_hard": {
"exact_match,none": 0.045454545454545456,
"exact_match_stderr,none": 0.018199158975632696,
"alias": " - leaderboard_math_geometry_hard"
},
"leaderboard_math_intermediate_algebra_hard": {
"exact_match,none": 0.02857142857142857,
"exact_match_stderr,none": 0.009973998820736026,
"alias": " - leaderboard_math_intermediate_algebra_hard"
},
"leaderboard_math_num_theory_hard": {
"exact_match,none": 0.09740259740259741,
"exact_match_stderr,none": 0.023971024368870247,
"alias": " - leaderboard_math_num_theory_hard"
},
"leaderboard_math_prealgebra_hard": {
"exact_match,none": 0.20725388601036268,
"exact_match_stderr,none": 0.02925282329180363,
"alias": " - leaderboard_math_prealgebra_hard"
},
"leaderboard_math_precalculus_hard": {
"exact_match,none": 0.05185185185185185,
"exact_match_stderr,none": 0.019154368449050514,
"alias": " - leaderboard_math_precalculus_hard"
},
"leaderboard_mmlu_pro": {
"acc,none": 0.37175864361702127,
"acc_stderr,none": 0.004405984196843539,
"alias": " - leaderboard_mmlu_pro"
},
"leaderboard_musr": {
"acc_norm,none": 0.3637566137566138,
"acc_norm_stderr,none": 0.017089741838981088,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.031682156431413803,
"alias": " - leaderboard_musr_murder_mysteries"
},
"leaderboard_musr_object_placements": {
"acc_norm,none": 0.25,
"acc_norm_stderr,none": 0.02711630722733202,
"alias": " - leaderboard_musr_object_placements"
},
"leaderboard_musr_team_allocation": {
"acc_norm,none": 0.336,
"acc_norm_stderr,none": 0.029933259094191516,
"alias": " - leaderboard_musr_team_allocation"
}
},
"leaderboard": {
"exact_match,none": 0.08836858006042296,
"exact_match_stderr,none": 0.007640587008385335,
"acc,none": 0.37175864361702127,
"acc_stderr,none": 0.004405984196843539,
"inst_level_loose_acc,none": 0.8105515587529976,
"inst_level_loose_acc_stderr,none": "N/A",
"inst_level_strict_acc,none": 0.7266187050359713,
"inst_level_strict_acc_stderr,none": "N/A",
"acc_norm,none": 0.4436373070437151,
"acc_norm_stderr,none": 0.005348605915705943,
"prompt_level_strict_acc,none": 0.6303142329020333,
"prompt_level_strict_acc_stderr,none": 0.020772943616332303,
"prompt_level_loose_acc,none": 0.7319778188539742,
"prompt_level_loose_acc_stderr,none": 0.01906063869163028,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.4922756465891338,
"acc_norm_stderr,none": 0.006267767960027899,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"acc_norm,none": 0.808,
"acc_norm_stderr,none": 0.024960691989172005,
"alias": " - leaderboard_bbh_boolean_expressions"
},
"leaderboard_bbh_causal_judgement": {
"acc_norm,none": 0.6042780748663101,
"acc_norm_stderr,none": 0.03585560071592546,
"alias": " - leaderboard_bbh_causal_judgement"
},
"leaderboard_bbh_date_understanding": {
"acc_norm,none": 0.456,
"acc_norm_stderr,none": 0.0315632850612134,
"alias": " - leaderboard_bbh_date_understanding"
},
"leaderboard_bbh_disambiguation_qa": {
"acc_norm,none": 0.568,
"acc_norm_stderr,none": 0.031391810765429407,
"alias": " - leaderboard_bbh_disambiguation_qa"
},
"leaderboard_bbh_formal_fallacies": {
"acc_norm,none": 0.544,
"acc_norm_stderr,none": 0.03156328506121339,
"alias": " - leaderboard_bbh_formal_fallacies"
},
"leaderboard_bbh_geometric_shapes": {
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578683,
"alias": " - leaderboard_bbh_geometric_shapes"
},
"leaderboard_bbh_hyperbaton": {
"acc_norm,none": 0.564,
"acc_norm_stderr,none": 0.03142556706028128,
"alias": " - leaderboard_bbh_hyperbaton"
},
"leaderboard_bbh_logical_deduction_five_objects": {
"acc_norm,none": 0.368,
"acc_norm_stderr,none": 0.030562070620993163,
"alias": " - leaderboard_bbh_logical_deduction_five_objects"
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"acc_norm,none": 0.476,
"acc_norm_stderr,none": 0.03164968895968782,
"alias": " - leaderboard_bbh_logical_deduction_seven_objects"
},
"leaderboard_bbh_logical_deduction_three_objects": {
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725504,
"alias": " - leaderboard_bbh_logical_deduction_three_objects"
},
"leaderboard_bbh_movie_recommendation": {
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.0316821564314138,
"alias": " - leaderboard_bbh_movie_recommendation"
},
"leaderboard_bbh_navigate": {
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725504,
"alias": " - leaderboard_bbh_navigate"
},
"leaderboard_bbh_object_counting": {
"acc_norm,none": 0.38,
"acc_norm_stderr,none": 0.030760116042626042,
"alias": " - leaderboard_bbh_object_counting"
},
"leaderboard_bbh_penguins_in_a_table": {
"acc_norm,none": 0.541095890410959,
"acc_norm_stderr,none": 0.041382249050673066,
"alias": " - leaderboard_bbh_penguins_in_a_table"
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"acc_norm,none": 0.632,
"acc_norm_stderr,none": 0.030562070620993163,
"alias": " - leaderboard_bbh_reasoning_about_colored_objects"
},
"leaderboard_bbh_ruin_names": {
"acc_norm,none": 0.696,
"acc_norm_stderr,none": 0.029150213374159677,
"alias": " - leaderboard_bbh_ruin_names"
},
"leaderboard_bbh_salient_translation_error_detection": {
"acc_norm,none": 0.468,
"acc_norm_stderr,none": 0.031621252575725504,
"alias": " - leaderboard_bbh_salient_translation_error_detection"
},
"leaderboard_bbh_snarks": {
"acc_norm,none": 0.47752808988764045,
"acc_norm_stderr,none": 0.03754432508487193,
"alias": " - leaderboard_bbh_snarks"
},
"leaderboard_bbh_sports_understanding": {
"acc_norm,none": 0.804,
"acc_norm_stderr,none": 0.02515685731325593,
"alias": " - leaderboard_bbh_sports_understanding"
},
"leaderboard_bbh_temporal_sequences": {
"acc_norm,none": 0.336,
"acc_norm_stderr,none": 0.029933259094191516,
"alias": " - leaderboard_bbh_temporal_sequences"
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"acc_norm,none": 0.188,
"acc_norm_stderr,none": 0.02476037772775051,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"acc_norm,none": 0.192,
"acc_norm_stderr,none": 0.02496069198917202,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"acc_norm,none": 0.384,
"acc_norm_stderr,none": 0.030821679117375378,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects"
},
"leaderboard_bbh_web_of_lies": {
"acc_norm,none": 0.484,
"acc_norm_stderr,none": 0.03166998503010742,
"alias": " - leaderboard_bbh_web_of_lies"
},
"leaderboard_gpqa": {
"acc_norm,none": 0.25922818791946306,
"acc_norm_stderr,none": 0.01270453266853576,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"acc_norm,none": 0.26262626262626265,
"acc_norm_stderr,none": 0.031353050095330855,
"alias": " - leaderboard_gpqa_diamond"
},
"leaderboard_gpqa_extended": {
"acc_norm,none": 0.2692307692307692,
"acc_norm_stderr,none": 0.019000027142915377,
"alias": " - leaderboard_gpqa_extended"
},
"leaderboard_gpqa_main": {
"acc_norm,none": 0.24553571428571427,
"acc_norm_stderr,none": 0.020357428454484575,
"alias": " - leaderboard_gpqa_main"
},
"leaderboard_ifeval": {
"prompt_level_strict_acc,none": 0.6303142329020333,
"prompt_level_strict_acc_stderr,none": 0.020772943616332303,
"inst_level_strict_acc,none": 0.7266187050359713,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.7319778188539742,
"prompt_level_loose_acc_stderr,none": 0.019060638691630282,
"inst_level_loose_acc,none": 0.8105515587529976,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": " - leaderboard_ifeval"
},
"leaderboard_math_hard": {
"exact_match,none": 0.08836858006042296,
"exact_match_stderr,none": 0.007640587008385335,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"exact_match,none": 0.12052117263843648,
"exact_match_stderr,none": 0.018611597654928206,
"alias": " - leaderboard_math_algebra_hard"
},
"leaderboard_math_counting_and_prob_hard": {
"exact_match,none": 0.032520325203252036,
"exact_match_stderr,none": 0.01605899820587974,
"alias": " - leaderboard_math_counting_and_prob_hard"
},
"leaderboard_math_geometry_hard": {
"exact_match,none": 0.045454545454545456,
"exact_match_stderr,none": 0.018199158975632696,
"alias": " - leaderboard_math_geometry_hard"
},
"leaderboard_math_intermediate_algebra_hard": {
"exact_match,none": 0.02857142857142857,
"exact_match_stderr,none": 0.009973998820736026,
"alias": " - leaderboard_math_intermediate_algebra_hard"
},
"leaderboard_math_num_theory_hard": {
"exact_match,none": 0.09740259740259741,
"exact_match_stderr,none": 0.023971024368870247,
"alias": " - leaderboard_math_num_theory_hard"
},
"leaderboard_math_prealgebra_hard": {
"exact_match,none": 0.20725388601036268,
"exact_match_stderr,none": 0.02925282329180363,
"alias": " - leaderboard_math_prealgebra_hard"
},
"leaderboard_math_precalculus_hard": {
"exact_match,none": 0.05185185185185185,
"exact_match_stderr,none": 0.019154368449050514,
"alias": " - leaderboard_math_precalculus_hard"
},
"leaderboard_mmlu_pro": {
"acc,none": 0.37175864361702127,
"acc_stderr,none": 0.004405984196843539,
"alias": " - leaderboard_mmlu_pro"
},
"leaderboard_musr": {
"acc_norm,none": 0.3637566137566138,
"acc_norm_stderr,none": 0.017089741838981088,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.031682156431413803,
"alias": " - leaderboard_musr_murder_mysteries"
},
"leaderboard_musr_object_placements": {
"acc_norm,none": 0.25,
"acc_norm_stderr,none": 0.02711630722733202,
"alias": " - leaderboard_musr_object_placements"
},
"leaderboard_musr_team_allocation": {
"acc_norm,none": 0.336,
"acc_norm_stderr,none": 0.029933259094191516,
"alias": " - leaderboard_musr_team_allocation"
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
qklent/ficbook_char_description | qklent | "2024-09-29T15:21:56Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T15:13:51Z" | ---
dataset_info:
features:
- name: url
dtype: string
- name: likes
dtype: int64
- name: num_chapters
dtype: int64
- name: char_description
dtype: string
- name: avg_likes_for_chapters
dtype: float64
- name: conversation
list:
- name: content
dtype: string
- name: role
dtype: string
splits:
- name: train
num_bytes: 426290649
num_examples: 12171
download_size: 195340398
dataset_size: 426290649
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
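The schema above can be loaded directly with the `datasets` library. A minimal sketch — the field names come from the `dataset_info` block; everything else is illustrative:

```python
from datasets import load_dataset

# Load the train split declared in the config above.
ds = load_dataset("qklent/ficbook_char_description", split="train")

row = ds[0]
print(row["url"], row["likes"], row["num_chapters"])

# `conversation` is a list of {content, role} dicts, per the feature spec.
for turn in row["conversation"]:
    print(turn["role"], ":", turn["content"][:80])
```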
|
HuilongChen/ultrafeedback-gpt-3.5-turbo-helpfulness | HuilongChen | "2024-09-29T15:17:34Z" | 0 | 0 | [
"license:apache-2.0",
"region:us"
] | null | "2024-09-29T15:17:34Z" | ---
license: apache-2.0
---
|
open-llm-leaderboard/yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001-details | open-llm-leaderboard | "2024-09-29T15:22:29Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T15:18:45Z" | ---
pretty_name: Evaluation run of yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001](https://huggingface.co/yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001)\n\
The dataset is composed of 38 configurations, each one corresponding to one of\
\ the evaluated tasks.\n\nThe dataset has been created from 1 run. Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run. The \"train\" split always points to the latest\
\ results.\n\nAn additional configuration \"results\" stores all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001-details\"\
,\n\tname=\"yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-09-29T15-18-45.245227](https://huggingface.co/datasets/open-llm-leaderboard/yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001-details/blob/main/yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001/results_2024-09-29T15-18-45.245227.json)\
\ (note that there might be results for other tasks in the repo if successive evals\
\ didn't cover the same tasks. You can find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"acc_norm,none\": 0.4450642106628616,\n \"acc_norm_stderr,none\"\
: 0.005348427489459913,\n \"exact_match,none\": 0.06722054380664652,\n\
\ \"exact_match_stderr,none\": 0.006704545387077425,\n \"\
prompt_level_loose_acc,none\": 0.7060998151571165,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.019603612015637102,\n \"acc,none\": 0.3702626329787234,\n \
\ \"acc_stderr,none\": 0.00440234233523509,\n \"inst_level_strict_acc,none\"\
: 0.7038369304556354,\n \"inst_level_strict_acc_stderr,none\": \"N/A\"\
,\n \"prompt_level_strict_acc,none\": 0.5970425138632163,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.021107430256731653,\n \
\ \"inst_level_loose_acc,none\": 0.7985611510791367,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.49401145634438465,\n \"acc_norm_stderr,none\"\
: 0.006265662903366742,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"acc_norm,none\"\
: 0.808,\n \"acc_norm_stderr,none\": 0.024960691989172012,\n \
\ \"alias\": \" - leaderboard_bbh_boolean_expressions\"\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"acc_norm,none\": 0.6042780748663101,\n\
\ \"acc_norm_stderr,none\": 0.03585560071592546,\n \"alias\"\
: \" - leaderboard_bbh_causal_judgement\"\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"acc_norm,none\": 0.468,\n \"acc_norm_stderr,none\"\
: 0.03162125257572551,\n \"alias\": \" - leaderboard_bbh_date_understanding\"\
\n },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"acc_norm,none\"\
: 0.608,\n \"acc_norm_stderr,none\": 0.0309382076204012,\n \
\ \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\n },\n \"\
leaderboard_bbh_formal_fallacies\": {\n \"acc_norm,none\": 0.56,\n \
\ \"acc_norm_stderr,none\": 0.03145724452223564,\n \"alias\"\
: \" - leaderboard_bbh_formal_fallacies\"\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"acc_norm,none\": 0.316,\n \"acc_norm_stderr,none\"\
: 0.02946265759857868,\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
\n },\n \"leaderboard_bbh_hyperbaton\": {\n \"acc_norm,none\"\
: 0.572,\n \"acc_norm_stderr,none\": 0.0313559689237726,\n \
\ \"alias\": \" - leaderboard_bbh_hyperbaton\"\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"acc_norm,none\": 0.376,\n \"acc_norm_stderr,none\"\
: 0.030696336267394587,\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \
\ \"acc_norm,none\": 0.456,\n \"acc_norm_stderr,none\": 0.03156328506121339,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"acc_norm,none\": 0.528,\n \"acc_norm_stderr,none\": 0.03163648953154439,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
acc_norm,none\": 0.5,\n \"acc_norm_stderr,none\": 0.031686212526223896,\n\
\ \"alias\": \" - leaderboard_bbh_movie_recommendation\"\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"acc_norm,none\": 0.54,\n\
\ \"acc_norm_stderr,none\": 0.031584653891499,\n \"alias\"\
: \" - leaderboard_bbh_navigate\"\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"acc_norm,none\": 0.372,\n \"acc_norm_stderr,none\"\
: 0.030630325944558313,\n \"alias\": \" - leaderboard_bbh_object_counting\"\
\n },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
acc_norm,none\": 0.541095890410959,\n \"acc_norm_stderr,none\": 0.041382249050673066,\n\
\ \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\n },\n\
\ \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \"\
acc_norm,none\": 0.64,\n \"acc_norm_stderr,none\": 0.030418764025174995,\n\
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"acc_norm,none\"\
: 0.692,\n \"acc_norm_stderr,none\": 0.029256928606501864,\n \
\ \"alias\": \" - leaderboard_bbh_ruin_names\"\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"acc_norm,none\": 0.468,\n \"acc_norm_stderr,none\"\
: 0.031621252575725504,\n \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\
\n },\n \"leaderboard_bbh_snarks\": {\n \"acc_norm,none\"\
: 0.47752808988764045,\n \"acc_norm_stderr,none\": 0.03754432508487193,\n\
\ \"alias\": \" - leaderboard_bbh_snarks\"\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"acc_norm,none\": 0.792,\n \"acc_norm_stderr,none\"\
: 0.02572139890141639,\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
\n },\n \"leaderboard_bbh_temporal_sequences\": {\n \"\
acc_norm,none\": 0.328,\n \"acc_norm_stderr,none\": 0.029752391824475376,\n\
\ \"alias\": \" - leaderboard_bbh_temporal_sequences\"\n },\n\
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"acc_norm,none\": 0.184,\n \"acc_norm_stderr,none\": 0.02455581299422256,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"acc_norm,none\": 0.2,\n \"acc_norm_stderr,none\":\
\ 0.02534897002097908,\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"acc_norm,none\": 0.388,\n \"acc_norm_stderr,none\"\
: 0.030881038748993915,\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
\n },\n \"leaderboard_bbh_web_of_lies\": {\n \"acc_norm,none\"\
: 0.48,\n \"acc_norm_stderr,none\": 0.031660853408495185,\n \
\ \"alias\": \" - leaderboard_bbh_web_of_lies\"\n },\n \"leaderboard_gpqa\"\
: {\n \"acc_norm,none\": 0.25922818791946306,\n \"acc_norm_stderr,none\"\
: 0.01270453266853576,\n \"alias\": \" - leaderboard_gpqa\"\n \
\ },\n \"leaderboard_gpqa_diamond\": {\n \"acc_norm,none\": 0.26262626262626265,\n\
\ \"acc_norm_stderr,none\": 0.031353050095330855,\n \"alias\"\
: \" - leaderboard_gpqa_diamond\"\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"acc_norm,none\": 0.2692307692307692,\n \"acc_norm_stderr,none\"\
: 0.019000027142915377,\n \"alias\": \" - leaderboard_gpqa_extended\"\
\n },\n \"leaderboard_gpqa_main\": {\n \"acc_norm,none\"\
: 0.24553571428571427,\n \"acc_norm_stderr,none\": 0.020357428454484575,\n\
\ \"alias\": \" - leaderboard_gpqa_main\"\n },\n \"leaderboard_ifeval\"\
: {\n \"prompt_level_strict_acc,none\": 0.5970425138632163,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.021107430256731653,\n \
\ \"inst_level_strict_acc,none\": 0.7038369304556354,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.7060998151571165,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.019603612015637102,\n \
\ \"inst_level_loose_acc,none\": 0.7985611510791367,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"alias\": \" - leaderboard_ifeval\"\n },\n \
\ \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.06722054380664652,\n\
\ \"exact_match_stderr,none\": 0.006704545387077425,\n \"\
alias\": \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\"\
: {\n \"exact_match,none\": 0.11074918566775244,\n \"exact_match_stderr,none\"\
: 0.017939969209400993,\n \"alias\": \" - leaderboard_math_algebra_hard\"\
\n },\n \"leaderboard_math_counting_and_prob_hard\": {\n \
\ \"exact_match,none\": 0.024390243902439025,\n \"exact_match_stderr,none\"\
: 0.013965813032045563,\n \"alias\": \" - leaderboard_math_counting_and_prob_hard\"\
\n },\n \"leaderboard_math_geometry_hard\": {\n \"exact_match,none\"\
: 0.030303030303030304,\n \"exact_match_stderr,none\": 0.014977019714308242,\n\
\ \"alias\": \" - leaderboard_math_geometry_hard\"\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"exact_match,none\"\
: 0.017857142857142856,\n \"exact_match_stderr,none\": 0.00792850338788885,\n\
\ \"alias\": \" - leaderboard_math_intermediate_algebra_hard\"\n \
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"exact_match,none\"\
: 0.025974025974025976,\n \"exact_match_stderr,none\": 0.012859058999697082,\n\
\ \"alias\": \" - leaderboard_math_num_theory_hard\"\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"exact_match,none\"\
: 0.18134715025906736,\n \"exact_match_stderr,none\": 0.02780703236068609,\n\
\ \"alias\": \" - leaderboard_math_prealgebra_hard\"\n },\n \
\ \"leaderboard_math_precalculus_hard\": {\n \"exact_match,none\"\
: 0.02962962962962963,\n \"exact_match_stderr,none\": 0.014648038602753793,\n\
\ \"alias\": \" - leaderboard_math_precalculus_hard\"\n },\n\
\ \"leaderboard_mmlu_pro\": {\n \"acc,none\": 0.3702626329787234,\n\
\ \"acc_stderr,none\": 0.00440234233523509,\n \"alias\": \"\
\ - leaderboard_mmlu_pro\"\n },\n \"leaderboard_musr\": {\n \
\ \"acc_norm,none\": 0.36507936507936506,\n \"acc_norm_stderr,none\"\
: 0.017128762028354172,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"acc_norm,none\"\
: 0.5,\n \"acc_norm_stderr,none\": 0.031686212526223896,\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\"\n },\n \"\
leaderboard_musr_object_placements\": {\n \"acc_norm,none\": 0.24609375,\n\
\ \"acc_norm_stderr,none\": 0.026973597563786113,\n \"alias\"\
: \" - leaderboard_musr_object_placements\"\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"acc_norm,none\": 0.352,\n \"acc_norm_stderr,none\"\
: 0.030266288057359935,\n \"alias\": \" - leaderboard_musr_team_allocation\"\
\n }\n },\n \"leaderboard\": {\n \"acc_norm,none\": 0.4450642106628616,\n\
\ \"acc_norm_stderr,none\": 0.005348427489459913,\n \"exact_match,none\"\
: 0.06722054380664652,\n \"exact_match_stderr,none\": 0.006704545387077425,\n\
\ \"prompt_level_loose_acc,none\": 0.7060998151571165,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.019603612015637102,\n \"acc,none\": 0.3702626329787234,\n \"acc_stderr,none\"\
: 0.00440234233523509,\n \"inst_level_strict_acc,none\": 0.7038369304556354,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_strict_acc,none\"\
: 0.5970425138632163,\n \"prompt_level_strict_acc_stderr,none\": 0.021107430256731653,\n\
\ \"inst_level_loose_acc,none\": 0.7985611510791367,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.49401145634438465,\n \"acc_norm_stderr,none\"\
: 0.006265662903366742,\n \"alias\": \" - leaderboard_bbh\"\n },\n \
\ \"leaderboard_bbh_boolean_expressions\": {\n \"acc_norm,none\": 0.808,\n\
\ \"acc_norm_stderr,none\": 0.024960691989172012,\n \"alias\": \"\
\ - leaderboard_bbh_boolean_expressions\"\n },\n \"leaderboard_bbh_causal_judgement\"\
: {\n \"acc_norm,none\": 0.6042780748663101,\n \"acc_norm_stderr,none\"\
: 0.03585560071592546,\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
\n },\n \"leaderboard_bbh_date_understanding\": {\n \"acc_norm,none\"\
: 0.468,\n \"acc_norm_stderr,none\": 0.03162125257572551,\n \"alias\"\
: \" - leaderboard_bbh_date_understanding\"\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"acc_norm,none\": 0.608,\n \"acc_norm_stderr,none\": 0.0309382076204012,\n\
\ \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\n },\n \"leaderboard_bbh_formal_fallacies\"\
: {\n \"acc_norm,none\": 0.56,\n \"acc_norm_stderr,none\": 0.03145724452223564,\n\
\ \"alias\": \" - leaderboard_bbh_formal_fallacies\"\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"acc_norm,none\": 0.316,\n \"acc_norm_stderr,none\": 0.02946265759857868,\n\
\ \"alias\": \" - leaderboard_bbh_geometric_shapes\"\n },\n \"leaderboard_bbh_hyperbaton\"\
: {\n \"acc_norm,none\": 0.572,\n \"acc_norm_stderr,none\": 0.0313559689237726,\n\
\ \"alias\": \" - leaderboard_bbh_hyperbaton\"\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"acc_norm,none\": 0.376,\n \"acc_norm_stderr,none\": 0.030696336267394587,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\n \
\ },\n \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"acc_norm,none\"\
: 0.456,\n \"acc_norm_stderr,none\": 0.03156328506121339,\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_seven_objects\"\n },\n \"leaderboard_bbh_logical_deduction_three_objects\"\
: {\n \"acc_norm,none\": 0.528,\n \"acc_norm_stderr,none\": 0.03163648953154439,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\n \
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"acc_norm,none\"\
: 0.5,\n \"acc_norm_stderr,none\": 0.031686212526223896,\n \"alias\"\
: \" - leaderboard_bbh_movie_recommendation\"\n },\n \"leaderboard_bbh_navigate\"\
: {\n \"acc_norm,none\": 0.54,\n \"acc_norm_stderr,none\": 0.031584653891499,\n\
\ \"alias\": \" - leaderboard_bbh_navigate\"\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"acc_norm,none\": 0.372,\n \"acc_norm_stderr,none\": 0.030630325944558313,\n\
\ \"alias\": \" - leaderboard_bbh_object_counting\"\n },\n \"leaderboard_bbh_penguins_in_a_table\"\
: {\n \"acc_norm,none\": 0.541095890410959,\n \"acc_norm_stderr,none\"\
: 0.041382249050673066,\n \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\
\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \"\
acc_norm,none\": 0.64,\n \"acc_norm_stderr,none\": 0.030418764025174995,\n\
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\n \
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"acc_norm,none\": 0.692,\n\
\ \"acc_norm_stderr,none\": 0.029256928606501864,\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\"\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"acc_norm,none\": 0.468,\n \"acc_norm_stderr,none\": 0.031621252575725504,\n\
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"acc_norm,none\": 0.47752808988764045,\n\
\ \"acc_norm_stderr,none\": 0.03754432508487193,\n \"alias\": \" \
\ - leaderboard_bbh_snarks\"\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"acc_norm,none\": 0.792,\n \"acc_norm_stderr,none\": 0.02572139890141639,\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\"\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"acc_norm,none\": 0.328,\n \
\ \"acc_norm_stderr,none\": 0.029752391824475376,\n \"alias\": \" -\
\ leaderboard_bbh_temporal_sequences\"\n },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
: {\n \"acc_norm,none\": 0.184,\n \"acc_norm_stderr,none\": 0.02455581299422256,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"acc_norm,none\": 0.2,\n \"acc_norm_stderr,none\": 0.02534897002097908,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"acc_norm,none\": 0.388,\n \"acc_norm_stderr,none\": 0.030881038748993915,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
\n },\n \"leaderboard_bbh_web_of_lies\": {\n \"acc_norm,none\": 0.48,\n\
\ \"acc_norm_stderr,none\": 0.031660853408495185,\n \"alias\": \"\
\ - leaderboard_bbh_web_of_lies\"\n },\n \"leaderboard_gpqa\": {\n \
\ \"acc_norm,none\": 0.25922818791946306,\n \"acc_norm_stderr,none\": 0.01270453266853576,\n\
\ \"alias\": \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\"\
: {\n \"acc_norm,none\": 0.26262626262626265,\n \"acc_norm_stderr,none\"\
: 0.031353050095330855,\n \"alias\": \" - leaderboard_gpqa_diamond\"\n \
\ },\n \"leaderboard_gpqa_extended\": {\n \"acc_norm,none\": 0.2692307692307692,\n\
\ \"acc_norm_stderr,none\": 0.019000027142915377,\n \"alias\": \"\
\ - leaderboard_gpqa_extended\"\n },\n \"leaderboard_gpqa_main\": {\n \
\ \"acc_norm,none\": 0.24553571428571427,\n \"acc_norm_stderr,none\"\
: 0.020357428454484575,\n \"alias\": \" - leaderboard_gpqa_main\"\n },\n\
\ \"leaderboard_ifeval\": {\n \"prompt_level_strict_acc,none\": 0.5970425138632163,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.021107430256731653,\n \
\ \"inst_level_strict_acc,none\": 0.7038369304556354,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.7060998151571165,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.019603612015637102,\n \"inst_level_loose_acc,none\"\
: 0.7985611510791367,\n \"inst_level_loose_acc_stderr,none\": \"N/A\",\n\
\ \"alias\": \" - leaderboard_ifeval\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.06722054380664652,\n \"exact_match_stderr,none\"\
: 0.006704545387077425,\n \"alias\": \" - leaderboard_math_hard\"\n },\n\
\ \"leaderboard_math_algebra_hard\": {\n \"exact_match,none\": 0.11074918566775244,\n\
\ \"exact_match_stderr,none\": 0.017939969209400993,\n \"alias\":\
\ \" - leaderboard_math_algebra_hard\"\n },\n \"leaderboard_math_counting_and_prob_hard\"\
: {\n \"exact_match,none\": 0.024390243902439025,\n \"exact_match_stderr,none\"\
: 0.013965813032045563,\n \"alias\": \" - leaderboard_math_counting_and_prob_hard\"\
\n },\n \"leaderboard_math_geometry_hard\": {\n \"exact_match,none\"\
: 0.030303030303030304,\n \"exact_match_stderr,none\": 0.014977019714308242,\n\
\ \"alias\": \" - leaderboard_math_geometry_hard\"\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"exact_match,none\": 0.017857142857142856,\n \"exact_match_stderr,none\"\
: 0.00792850338788885,\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\"\
\n },\n \"leaderboard_math_num_theory_hard\": {\n \"exact_match,none\"\
: 0.025974025974025976,\n \"exact_match_stderr,none\": 0.012859058999697082,\n\
\ \"alias\": \" - leaderboard_math_num_theory_hard\"\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"exact_match,none\": 0.18134715025906736,\n \"exact_match_stderr,none\"\
: 0.02780703236068609,\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
\n },\n \"leaderboard_math_precalculus_hard\": {\n \"exact_match,none\"\
: 0.02962962962962963,\n \"exact_match_stderr,none\": 0.014648038602753793,\n\
\ \"alias\": \" - leaderboard_math_precalculus_hard\"\n },\n \"leaderboard_mmlu_pro\"\
: {\n \"acc,none\": 0.3702626329787234,\n \"acc_stderr,none\": 0.00440234233523509,\n\
\ \"alias\": \" - leaderboard_mmlu_pro\"\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.36507936507936506,\n \"acc_norm_stderr,none\"\
: 0.017128762028354172,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"acc_norm,none\": 0.5,\n \
\ \"acc_norm_stderr,none\": 0.031686212526223896,\n \"alias\": \" \
\ - leaderboard_musr_murder_mysteries\"\n },\n \"leaderboard_musr_object_placements\"\
: {\n \"acc_norm,none\": 0.24609375,\n \"acc_norm_stderr,none\": 0.026973597563786113,\n\
\ \"alias\": \" - leaderboard_musr_object_placements\"\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"acc_norm,none\": 0.352,\n \"acc_norm_stderr,none\": 0.030266288057359935,\n\
\ \"alias\": \" - leaderboard_musr_team_allocation\"\n }\n}\n```"
repo_url: https://huggingface.co/yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_date_understanding
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_navigate
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_navigate_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_object_counting
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_object_counting_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_ruin_names
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_snarks
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_snarks_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_gpqa_diamond
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_gpqa_diamond_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_gpqa_extended
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_gpqa_extended_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_gpqa_main
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_gpqa_main_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_ifeval
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_ifeval_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_math_algebra_hard
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_math_algebra_hard_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_math_geometry_hard
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_math_geometry_hard_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_math_num_theory_hard
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_math_precalculus_hard
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_mmlu_pro
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_mmlu_pro_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_musr_object_placements
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_musr_object_placements_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-09-29T15-18-45.245227.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_musr_team_allocation
data_files:
- split: 2024_09_29T15_18_45.245227
path:
- '**/samples_leaderboard_musr_team_allocation_2024-09-29T15-18-45.245227.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-09-29T15-18-45.245227.jsonl'
---
# Dataset Card for Evaluation run of yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001](https://huggingface.co/yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001)
The dataset is composed of 38 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001-details",
name="yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
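If you would rather enumerate the per-task configurations than copy their names from the list above, the `datasets` library provides `get_dataset_config_names`; a minimal sketch:

```python
from datasets import get_dataset_config_names

# Enumerate every per-task configuration of this details repository;
# names follow the pattern <model>__<task> used in the example above.
configs = get_dataset_config_names(
    "open-llm-leaderboard/yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001-details"
)
print(f"{len(configs)} configurations")
for name in sorted(configs):
    print(name)
```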
## Latest results
These are the [latest results from run 2024-09-29T15-18-45.245227](https://huggingface.co/datasets/open-llm-leaderboard/yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001-details/blob/main/yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001/results_2024-09-29T15-18-45.245227.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"leaderboard": {
"acc_norm,none": 0.4450642106628616,
"acc_norm_stderr,none": 0.005348427489459913,
"exact_match,none": 0.06722054380664652,
"exact_match_stderr,none": 0.006704545387077425,
"prompt_level_loose_acc,none": 0.7060998151571165,
"prompt_level_loose_acc_stderr,none": 0.019603612015637102,
"acc,none": 0.3702626329787234,
"acc_stderr,none": 0.00440234233523509,
"inst_level_strict_acc,none": 0.7038369304556354,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.5970425138632163,
"prompt_level_strict_acc_stderr,none": 0.021107430256731653,
"inst_level_loose_acc,none": 0.7985611510791367,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.49401145634438465,
"acc_norm_stderr,none": 0.006265662903366742,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"acc_norm,none": 0.808,
"acc_norm_stderr,none": 0.024960691989172012,
"alias": " - leaderboard_bbh_boolean_expressions"
},
"leaderboard_bbh_causal_judgement": {
"acc_norm,none": 0.6042780748663101,
"acc_norm_stderr,none": 0.03585560071592546,
"alias": " - leaderboard_bbh_causal_judgement"
},
"leaderboard_bbh_date_understanding": {
"acc_norm,none": 0.468,
"acc_norm_stderr,none": 0.03162125257572551,
"alias": " - leaderboard_bbh_date_understanding"
},
"leaderboard_bbh_disambiguation_qa": {
"acc_norm,none": 0.608,
"acc_norm_stderr,none": 0.0309382076204012,
"alias": " - leaderboard_bbh_disambiguation_qa"
},
"leaderboard_bbh_formal_fallacies": {
"acc_norm,none": 0.56,
"acc_norm_stderr,none": 0.03145724452223564,
"alias": " - leaderboard_bbh_formal_fallacies"
},
"leaderboard_bbh_geometric_shapes": {
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.02946265759857868,
"alias": " - leaderboard_bbh_geometric_shapes"
},
"leaderboard_bbh_hyperbaton": {
"acc_norm,none": 0.572,
"acc_norm_stderr,none": 0.0313559689237726,
"alias": " - leaderboard_bbh_hyperbaton"
},
"leaderboard_bbh_logical_deduction_five_objects": {
"acc_norm,none": 0.376,
"acc_norm_stderr,none": 0.030696336267394587,
"alias": " - leaderboard_bbh_logical_deduction_five_objects"
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"acc_norm,none": 0.456,
"acc_norm_stderr,none": 0.03156328506121339,
"alias": " - leaderboard_bbh_logical_deduction_seven_objects"
},
"leaderboard_bbh_logical_deduction_three_objects": {
"acc_norm,none": 0.528,
"acc_norm_stderr,none": 0.03163648953154439,
"alias": " - leaderboard_bbh_logical_deduction_three_objects"
},
"leaderboard_bbh_movie_recommendation": {
"acc_norm,none": 0.5,
"acc_norm_stderr,none": 0.031686212526223896,
"alias": " - leaderboard_bbh_movie_recommendation"
},
"leaderboard_bbh_navigate": {
"acc_norm,none": 0.54,
"acc_norm_stderr,none": 0.031584653891499,
"alias": " - leaderboard_bbh_navigate"
},
"leaderboard_bbh_object_counting": {
"acc_norm,none": 0.372,
"acc_norm_stderr,none": 0.030630325944558313,
"alias": " - leaderboard_bbh_object_counting"
},
"leaderboard_bbh_penguins_in_a_table": {
"acc_norm,none": 0.541095890410959,
"acc_norm_stderr,none": 0.041382249050673066,
"alias": " - leaderboard_bbh_penguins_in_a_table"
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"acc_norm,none": 0.64,
"acc_norm_stderr,none": 0.030418764025174995,
"alias": " - leaderboard_bbh_reasoning_about_colored_objects"
},
"leaderboard_bbh_ruin_names": {
"acc_norm,none": 0.692,
"acc_norm_stderr,none": 0.029256928606501864,
"alias": " - leaderboard_bbh_ruin_names"
},
"leaderboard_bbh_salient_translation_error_detection": {
"acc_norm,none": 0.468,
"acc_norm_stderr,none": 0.031621252575725504,
"alias": " - leaderboard_bbh_salient_translation_error_detection"
},
"leaderboard_bbh_snarks": {
"acc_norm,none": 0.47752808988764045,
"acc_norm_stderr,none": 0.03754432508487193,
"alias": " - leaderboard_bbh_snarks"
},
"leaderboard_bbh_sports_understanding": {
"acc_norm,none": 0.792,
"acc_norm_stderr,none": 0.02572139890141639,
"alias": " - leaderboard_bbh_sports_understanding"
},
"leaderboard_bbh_temporal_sequences": {
"acc_norm,none": 0.328,
"acc_norm_stderr,none": 0.029752391824475376,
"alias": " - leaderboard_bbh_temporal_sequences"
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"acc_norm,none": 0.184,
"acc_norm_stderr,none": 0.02455581299422256,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"acc_norm,none": 0.2,
"acc_norm_stderr,none": 0.02534897002097908,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"acc_norm,none": 0.388,
"acc_norm_stderr,none": 0.030881038748993915,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects"
},
"leaderboard_bbh_web_of_lies": {
"acc_norm,none": 0.48,
"acc_norm_stderr,none": 0.031660853408495185,
"alias": " - leaderboard_bbh_web_of_lies"
},
"leaderboard_gpqa": {
"acc_norm,none": 0.25922818791946306,
"acc_norm_stderr,none": 0.01270453266853576,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"acc_norm,none": 0.26262626262626265,
"acc_norm_stderr,none": 0.031353050095330855,
"alias": " - leaderboard_gpqa_diamond"
},
"leaderboard_gpqa_extended": {
"acc_norm,none": 0.2692307692307692,
"acc_norm_stderr,none": 0.019000027142915377,
"alias": " - leaderboard_gpqa_extended"
},
"leaderboard_gpqa_main": {
"acc_norm,none": 0.24553571428571427,
"acc_norm_stderr,none": 0.020357428454484575,
"alias": " - leaderboard_gpqa_main"
},
"leaderboard_ifeval": {
"prompt_level_strict_acc,none": 0.5970425138632163,
"prompt_level_strict_acc_stderr,none": 0.021107430256731653,
"inst_level_strict_acc,none": 0.7038369304556354,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.7060998151571165,
"prompt_level_loose_acc_stderr,none": 0.019603612015637102,
"inst_level_loose_acc,none": 0.7985611510791367,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": " - leaderboard_ifeval"
},
"leaderboard_math_hard": {
"exact_match,none": 0.06722054380664652,
"exact_match_stderr,none": 0.006704545387077425,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"exact_match,none": 0.11074918566775244,
"exact_match_stderr,none": 0.017939969209400993,
"alias": " - leaderboard_math_algebra_hard"
},
"leaderboard_math_counting_and_prob_hard": {
"exact_match,none": 0.024390243902439025,
"exact_match_stderr,none": 0.013965813032045563,
"alias": " - leaderboard_math_counting_and_prob_hard"
},
"leaderboard_math_geometry_hard": {
"exact_match,none": 0.030303030303030304,
"exact_match_stderr,none": 0.014977019714308242,
"alias": " - leaderboard_math_geometry_hard"
},
"leaderboard_math_intermediate_algebra_hard": {
"exact_match,none": 0.017857142857142856,
"exact_match_stderr,none": 0.00792850338788885,
"alias": " - leaderboard_math_intermediate_algebra_hard"
},
"leaderboard_math_num_theory_hard": {
"exact_match,none": 0.025974025974025976,
"exact_match_stderr,none": 0.012859058999697082,
"alias": " - leaderboard_math_num_theory_hard"
},
"leaderboard_math_prealgebra_hard": {
"exact_match,none": 0.18134715025906736,
"exact_match_stderr,none": 0.02780703236068609,
"alias": " - leaderboard_math_prealgebra_hard"
},
"leaderboard_math_precalculus_hard": {
"exact_match,none": 0.02962962962962963,
"exact_match_stderr,none": 0.014648038602753793,
"alias": " - leaderboard_math_precalculus_hard"
},
"leaderboard_mmlu_pro": {
"acc,none": 0.3702626329787234,
"acc_stderr,none": 0.00440234233523509,
"alias": " - leaderboard_mmlu_pro"
},
"leaderboard_musr": {
"acc_norm,none": 0.36507936507936506,
"acc_norm_stderr,none": 0.017128762028354172,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"acc_norm,none": 0.5,
"acc_norm_stderr,none": 0.031686212526223896,
"alias": " - leaderboard_musr_murder_mysteries"
},
"leaderboard_musr_object_placements": {
"acc_norm,none": 0.24609375,
"acc_norm_stderr,none": 0.026973597563786113,
"alias": " - leaderboard_musr_object_placements"
},
"leaderboard_musr_team_allocation": {
"acc_norm,none": 0.352,
"acc_norm_stderr,none": 0.030266288057359935,
"alias": " - leaderboard_musr_team_allocation"
}
},
"leaderboard": {
"acc_norm,none": 0.4450642106628616,
"acc_norm_stderr,none": 0.005348427489459913,
"exact_match,none": 0.06722054380664652,
"exact_match_stderr,none": 0.006704545387077425,
"prompt_level_loose_acc,none": 0.7060998151571165,
"prompt_level_loose_acc_stderr,none": 0.019603612015637102,
"acc,none": 0.3702626329787234,
"acc_stderr,none": 0.00440234233523509,
"inst_level_strict_acc,none": 0.7038369304556354,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.5970425138632163,
"prompt_level_strict_acc_stderr,none": 0.021107430256731653,
"inst_level_loose_acc,none": 0.7985611510791367,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.49401145634438465,
"acc_norm_stderr,none": 0.006265662903366742,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"acc_norm,none": 0.808,
"acc_norm_stderr,none": 0.024960691989172012,
"alias": " - leaderboard_bbh_boolean_expressions"
},
"leaderboard_bbh_causal_judgement": {
"acc_norm,none": 0.6042780748663101,
"acc_norm_stderr,none": 0.03585560071592546,
"alias": " - leaderboard_bbh_causal_judgement"
},
"leaderboard_bbh_date_understanding": {
"acc_norm,none": 0.468,
"acc_norm_stderr,none": 0.03162125257572551,
"alias": " - leaderboard_bbh_date_understanding"
},
"leaderboard_bbh_disambiguation_qa": {
"acc_norm,none": 0.608,
"acc_norm_stderr,none": 0.0309382076204012,
"alias": " - leaderboard_bbh_disambiguation_qa"
},
"leaderboard_bbh_formal_fallacies": {
"acc_norm,none": 0.56,
"acc_norm_stderr,none": 0.03145724452223564,
"alias": " - leaderboard_bbh_formal_fallacies"
},
"leaderboard_bbh_geometric_shapes": {
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.02946265759857868,
"alias": " - leaderboard_bbh_geometric_shapes"
},
"leaderboard_bbh_hyperbaton": {
"acc_norm,none": 0.572,
"acc_norm_stderr,none": 0.0313559689237726,
"alias": " - leaderboard_bbh_hyperbaton"
},
"leaderboard_bbh_logical_deduction_five_objects": {
"acc_norm,none": 0.376,
"acc_norm_stderr,none": 0.030696336267394587,
"alias": " - leaderboard_bbh_logical_deduction_five_objects"
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"acc_norm,none": 0.456,
"acc_norm_stderr,none": 0.03156328506121339,
"alias": " - leaderboard_bbh_logical_deduction_seven_objects"
},
"leaderboard_bbh_logical_deduction_three_objects": {
"acc_norm,none": 0.528,
"acc_norm_stderr,none": 0.03163648953154439,
"alias": " - leaderboard_bbh_logical_deduction_three_objects"
},
"leaderboard_bbh_movie_recommendation": {
"acc_norm,none": 0.5,
"acc_norm_stderr,none": 0.031686212526223896,
"alias": " - leaderboard_bbh_movie_recommendation"
},
"leaderboard_bbh_navigate": {
"acc_norm,none": 0.54,
"acc_norm_stderr,none": 0.031584653891499,
"alias": " - leaderboard_bbh_navigate"
},
"leaderboard_bbh_object_counting": {
"acc_norm,none": 0.372,
"acc_norm_stderr,none": 0.030630325944558313,
"alias": " - leaderboard_bbh_object_counting"
},
"leaderboard_bbh_penguins_in_a_table": {
"acc_norm,none": 0.541095890410959,
"acc_norm_stderr,none": 0.041382249050673066,
"alias": " - leaderboard_bbh_penguins_in_a_table"
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"acc_norm,none": 0.64,
"acc_norm_stderr,none": 0.030418764025174995,
"alias": " - leaderboard_bbh_reasoning_about_colored_objects"
},
"leaderboard_bbh_ruin_names": {
"acc_norm,none": 0.692,
"acc_norm_stderr,none": 0.029256928606501864,
"alias": " - leaderboard_bbh_ruin_names"
},
"leaderboard_bbh_salient_translation_error_detection": {
"acc_norm,none": 0.468,
"acc_norm_stderr,none": 0.031621252575725504,
"alias": " - leaderboard_bbh_salient_translation_error_detection"
},
"leaderboard_bbh_snarks": {
"acc_norm,none": 0.47752808988764045,
"acc_norm_stderr,none": 0.03754432508487193,
"alias": " - leaderboard_bbh_snarks"
},
"leaderboard_bbh_sports_understanding": {
"acc_norm,none": 0.792,
"acc_norm_stderr,none": 0.02572139890141639,
"alias": " - leaderboard_bbh_sports_understanding"
},
"leaderboard_bbh_temporal_sequences": {
"acc_norm,none": 0.328,
"acc_norm_stderr,none": 0.029752391824475376,
"alias": " - leaderboard_bbh_temporal_sequences"
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"acc_norm,none": 0.184,
"acc_norm_stderr,none": 0.02455581299422256,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"acc_norm,none": 0.2,
"acc_norm_stderr,none": 0.02534897002097908,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"acc_norm,none": 0.388,
"acc_norm_stderr,none": 0.030881038748993915,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects"
},
"leaderboard_bbh_web_of_lies": {
"acc_norm,none": 0.48,
"acc_norm_stderr,none": 0.031660853408495185,
"alias": " - leaderboard_bbh_web_of_lies"
},
"leaderboard_gpqa": {
"acc_norm,none": 0.25922818791946306,
"acc_norm_stderr,none": 0.01270453266853576,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"acc_norm,none": 0.26262626262626265,
"acc_norm_stderr,none": 0.031353050095330855,
"alias": " - leaderboard_gpqa_diamond"
},
"leaderboard_gpqa_extended": {
"acc_norm,none": 0.2692307692307692,
"acc_norm_stderr,none": 0.019000027142915377,
"alias": " - leaderboard_gpqa_extended"
},
"leaderboard_gpqa_main": {
"acc_norm,none": 0.24553571428571427,
"acc_norm_stderr,none": 0.020357428454484575,
"alias": " - leaderboard_gpqa_main"
},
"leaderboard_ifeval": {
"prompt_level_strict_acc,none": 0.5970425138632163,
"prompt_level_strict_acc_stderr,none": 0.021107430256731653,
"inst_level_strict_acc,none": 0.7038369304556354,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.7060998151571165,
"prompt_level_loose_acc_stderr,none": 0.019603612015637102,
"inst_level_loose_acc,none": 0.7985611510791367,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": " - leaderboard_ifeval"
},
"leaderboard_math_hard": {
"exact_match,none": 0.06722054380664652,
"exact_match_stderr,none": 0.006704545387077425,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"exact_match,none": 0.11074918566775244,
"exact_match_stderr,none": 0.017939969209400993,
"alias": " - leaderboard_math_algebra_hard"
},
"leaderboard_math_counting_and_prob_hard": {
"exact_match,none": 0.024390243902439025,
"exact_match_stderr,none": 0.013965813032045563,
"alias": " - leaderboard_math_counting_and_prob_hard"
},
"leaderboard_math_geometry_hard": {
"exact_match,none": 0.030303030303030304,
"exact_match_stderr,none": 0.014977019714308242,
"alias": " - leaderboard_math_geometry_hard"
},
"leaderboard_math_intermediate_algebra_hard": {
"exact_match,none": 0.017857142857142856,
"exact_match_stderr,none": 0.00792850338788885,
"alias": " - leaderboard_math_intermediate_algebra_hard"
},
"leaderboard_math_num_theory_hard": {
"exact_match,none": 0.025974025974025976,
"exact_match_stderr,none": 0.012859058999697082,
"alias": " - leaderboard_math_num_theory_hard"
},
"leaderboard_math_prealgebra_hard": {
"exact_match,none": 0.18134715025906736,
"exact_match_stderr,none": 0.02780703236068609,
"alias": " - leaderboard_math_prealgebra_hard"
},
"leaderboard_math_precalculus_hard": {
"exact_match,none": 0.02962962962962963,
"exact_match_stderr,none": 0.014648038602753793,
"alias": " - leaderboard_math_precalculus_hard"
},
"leaderboard_mmlu_pro": {
"acc,none": 0.3702626329787234,
"acc_stderr,none": 0.00440234233523509,
"alias": " - leaderboard_mmlu_pro"
},
"leaderboard_musr": {
"acc_norm,none": 0.36507936507936506,
"acc_norm_stderr,none": 0.017128762028354172,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"acc_norm,none": 0.5,
"acc_norm_stderr,none": 0.031686212526223896,
"alias": " - leaderboard_musr_murder_mysteries"
},
"leaderboard_musr_object_placements": {
"acc_norm,none": 0.24609375,
"acc_norm_stderr,none": 0.026973597563786113,
"alias": " - leaderboard_musr_object_placements"
},
"leaderboard_musr_team_allocation": {
"acc_norm,none": 0.352,
"acc_norm_stderr,none": 0.030266288057359935,
"alias": " - leaderboard_musr_team_allocation"
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
VaggP/Eedi-competition-kaggle-prompt-formats | VaggP | "2024-09-29T15:18:55Z" | 0 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T15:18:53Z" | ---
dataset_info:
features:
- name: conversations
list:
- name: content
dtype: string
- name: role
dtype: string
splits:
- name: train
num_bytes: 2574794
num_examples: 4370
download_size: 519586
dataset_size: 2574794
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
ServiceNow/BigDoc-FT-v0.10 | ServiceNow | "2024-09-29T16:31:35Z" | 0 | 0 | [
"size_categories:100K<n<1M",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T15:18:57Z" | ---
dataset_info:
features:
- name: sample_id
dtype: string
- name: dataset_name
dtype: string
- name: annotations
sequence: string
- name: queries
sequence: string
- name: task_name
dtype: string
- name: image
dtype: binary
splits:
- name: train
num_bytes: 132648482024
num_examples: 547462
- name: test
num_bytes: 975237195
num_examples: 5650
- name: hidden_test
num_bytes: 930561355
num_examples: 5650
- name: val
num_bytes: 2068762564
num_examples: 11300
download_size: 133494944371
dataset_size: 136623043138
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
- split: hidden_test
path: data/hidden_test-*
- split: val
path: data/val-*
---
|
motorfireman1/finetuning_demo22 | motorfireman1 | "2024-09-29T15:20:32Z" | 0 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T15:20:29Z" | ---
dataset_info:
features:
- name: prompt
dtype: string
splits:
- name: train
num_bytes: 232459
num_examples: 31
download_size: 47314
dataset_size: 232459
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
heikal/arabic_call_splitted_7 | heikal | "2024-09-29T15:32:46Z" | 0 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T15:21:14Z" | ---
dataset_info:
features:
- name: client_id
dtype: string
- name: audio
dtype: audio
- name: sentence
dtype: string
- name: __index_level_0__
dtype: int64
splits:
- name: train
num_bytes: 74258852.0
num_examples: 80
- name: test
num_bytes: 7367883.0
num_examples: 9
download_size: 79719147
dataset_size: 81626735.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
|
open-llm-leaderboard/yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table-details | open-llm-leaderboard | "2024-09-29T15:28:44Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T15:24:35Z" | ---
pretty_name: Evaluation run of yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table](https://huggingface.co/yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table)\n\
The dataset is composed of 38 configurations, each one corresponding to one of\
\ the evaluated tasks.\n\nThe dataset has been created from 1 run. Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run. The \"latest\" split always points to the most recent\
\ results.\n\nAn additional configuration \"results\" stores all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table-details\"\
,\n\tname=\"yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-09-29T15-24-35.214199](https://huggingface.co/datasets/open-llm-leaderboard/yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table-details/blob/main/yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table/results_2024-09-29T15-24-35.214199.json)\
\ (note that there might be results for other tasks in the repo if successive evals\
\ didn't cover the same tasks; you can find each one in the results and in the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"inst_level_loose_acc,none\": 0.8369304556354916,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\",\n \"exact_match,none\":\
\ 0.08610271903323263,\n \"exact_match_stderr,none\": 0.007549162688514089,\n\
\ \"acc,none\": 0.36826795212765956,\n \"acc_stderr,none\"\
: 0.0043974160240703485,\n \"acc_norm,none\": 0.4509015436502789,\n \
\ \"acc_norm_stderr,none\": 0.005347396617859254,\n \"prompt_level_strict_acc,none\"\
: 0.6709796672828097,\n \"prompt_level_strict_acc_stderr,none\": 0.020219442118538433,\n\
\ \"inst_level_strict_acc,none\": 0.7553956834532374,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.7726432532347505,\n \"prompt_level_loose_acc_stderr,none\": 0.01803626267364009,\n\
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.5011282763409131,\n \"acc_norm_stderr,none\"\
: 0.0062614921080603415,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"acc_norm,none\"\
: 0.788,\n \"acc_norm_stderr,none\": 0.025901884690541156,\n \
\ \"alias\": \" - leaderboard_bbh_boolean_expressions\"\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"acc_norm,none\": 0.5882352941176471,\n\
\ \"acc_norm_stderr,none\": 0.03608640563085621,\n \"alias\"\
: \" - leaderboard_bbh_causal_judgement\"\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"acc_norm,none\": 0.464,\n \"acc_norm_stderr,none\"\
: 0.031603975145223735,\n \"alias\": \" - leaderboard_bbh_date_understanding\"\
\n },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"acc_norm,none\"\
: 0.628,\n \"acc_norm_stderr,none\": 0.030630325944558317,\n \
\ \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\n },\n \"\
leaderboard_bbh_formal_fallacies\": {\n \"acc_norm,none\": 0.588,\n \
\ \"acc_norm_stderr,none\": 0.031191596026022894,\n \"alias\"\
: \" - leaderboard_bbh_formal_fallacies\"\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"acc_norm,none\": 0.308,\n \"acc_norm_stderr,none\"\
: 0.029256928606501864,\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
\n },\n \"leaderboard_bbh_hyperbaton\": {\n \"acc_norm,none\"\
: 0.568,\n \"acc_norm_stderr,none\": 0.03139181076542941,\n \
\ \"alias\": \" - leaderboard_bbh_hyperbaton\"\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"acc_norm,none\": 0.388,\n \"acc_norm_stderr,none\"\
: 0.03088103874899391,\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \
\ \"acc_norm,none\": 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254709,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"acc_norm,none\": 0.556,\n \"acc_norm_stderr,none\": 0.03148684942554574,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
acc_norm,none\": 0.528,\n \"acc_norm_stderr,none\": 0.0316364895315444,\n\
\ \"alias\": \" - leaderboard_bbh_movie_recommendation\"\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"acc_norm,none\": 0.552,\n\
\ \"acc_norm_stderr,none\": 0.031514387611153515,\n \"alias\"\
: \" - leaderboard_bbh_navigate\"\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"acc_norm,none\": 0.38,\n \"acc_norm_stderr,none\"\
: 0.030760116042626042,\n \"alias\": \" - leaderboard_bbh_object_counting\"\
\n },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
acc_norm,none\": 0.5342465753424658,\n \"acc_norm_stderr,none\": 0.04142522736934775,\n\
\ \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\n },\n\
\ \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \"\
acc_norm,none\": 0.648,\n \"acc_norm_stderr,none\": 0.030266288057359942,\n\
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"acc_norm,none\"\
: 0.716,\n \"acc_norm_stderr,none\": 0.0285769587304374,\n \
\ \"alias\": \" - leaderboard_bbh_ruin_names\"\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"acc_norm,none\": 0.476,\n \"acc_norm_stderr,none\"\
: 0.03164968895968781,\n \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\
\n },\n \"leaderboard_bbh_snarks\": {\n \"acc_norm,none\"\
: 0.4943820224719101,\n \"acc_norm_stderr,none\": 0.03757992900475981,\n\
\ \"alias\": \" - leaderboard_bbh_snarks\"\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"acc_norm,none\": 0.796,\n \"acc_norm_stderr,none\"\
: 0.02553712157454814,\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
\n },\n \"leaderboard_bbh_temporal_sequences\": {\n \"\
acc_norm,none\": 0.304,\n \"acc_norm_stderr,none\": 0.029150213374159673,\n\
\ \"alias\": \" - leaderboard_bbh_temporal_sequences\"\n },\n\
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"acc_norm,none\": 0.188,\n \"acc_norm_stderr,none\": 0.024760377727750495,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"acc_norm,none\": 0.212,\n \"acc_norm_stderr,none\"\
: 0.02590188469054116,\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"acc_norm,none\": 0.392,\n \"acc_norm_stderr,none\"\
: 0.0309382076204012,\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
\n },\n \"leaderboard_bbh_web_of_lies\": {\n \"acc_norm,none\"\
: 0.476,\n \"acc_norm_stderr,none\": 0.03164968895968782,\n \
\ \"alias\": \" - leaderboard_bbh_web_of_lies\"\n },\n \"leaderboard_gpqa\"\
: {\n \"acc_norm,none\": 0.25922818791946306,\n \"acc_norm_stderr,none\"\
: 0.01270453266853576,\n \"alias\": \" - leaderboard_gpqa\"\n \
\ },\n \"leaderboard_gpqa_diamond\": {\n \"acc_norm,none\": 0.26262626262626265,\n\
\ \"acc_norm_stderr,none\": 0.031353050095330855,\n \"alias\"\
: \" - leaderboard_gpqa_diamond\"\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"acc_norm,none\": 0.2692307692307692,\n \"acc_norm_stderr,none\"\
: 0.019000027142915377,\n \"alias\": \" - leaderboard_gpqa_extended\"\
\n },\n \"leaderboard_gpqa_main\": {\n \"acc_norm,none\"\
: 0.24553571428571427,\n \"acc_norm_stderr,none\": 0.020357428454484575,\n\
\ \"alias\": \" - leaderboard_gpqa_main\"\n },\n \"leaderboard_ifeval\"\
: {\n \"prompt_level_strict_acc,none\": 0.6709796672828097,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.020219442118538433,\n \
\ \"inst_level_strict_acc,none\": 0.7553956834532374,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.7726432532347505,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.01803626267364009,\n \
\ \"inst_level_loose_acc,none\": 0.8369304556354916,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"alias\": \" - leaderboard_ifeval\"\n },\n \
\ \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.08610271903323263,\n\
\ \"exact_match_stderr,none\": 0.007549162688514089,\n \"\
alias\": \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\"\
: {\n \"exact_match,none\": 0.13029315960912052,\n \"exact_match_stderr,none\"\
: 0.01924360959782677,\n \"alias\": \" - leaderboard_math_algebra_hard\"\
\n },\n \"leaderboard_math_counting_and_prob_hard\": {\n \
\ \"exact_match,none\": 0.04878048780487805,\n \"exact_match_stderr,none\"\
: 0.01950219655858807,\n \"alias\": \" - leaderboard_math_counting_and_prob_hard\"\
\n },\n \"leaderboard_math_geometry_hard\": {\n \"exact_match,none\"\
: 0.022727272727272728,\n \"exact_match_stderr,none\": 0.013021046909063713,\n\
\ \"alias\": \" - leaderboard_math_geometry_hard\"\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"exact_match,none\"\
: 0.03214285714285714,\n \"exact_match_stderr,none\": 0.010559558661753198,\n\
\ \"alias\": \" - leaderboard_math_intermediate_algebra_hard\"\n \
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"exact_match,none\"\
: 0.08441558441558442,\n \"exact_match_stderr,none\": 0.02247578123186701,\n\
\ \"alias\": \" - leaderboard_math_num_theory_hard\"\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"exact_match,none\"\
: 0.19689119170984457,\n \"exact_match_stderr,none\": 0.028697873971860695,\n\
\ \"alias\": \" - leaderboard_math_prealgebra_hard\"\n },\n \
\ \"leaderboard_math_precalculus_hard\": {\n \"exact_match,none\"\
: 0.037037037037037035,\n \"exact_match_stderr,none\": 0.016314377626726054,\n\
\ \"alias\": \" - leaderboard_math_precalculus_hard\"\n },\n\
\ \"leaderboard_mmlu_pro\": {\n \"acc,none\": 0.36826795212765956,\n\
\ \"acc_stderr,none\": 0.0043974160240703485,\n \"alias\"\
: \" - leaderboard_mmlu_pro\"\n },\n \"leaderboard_musr\": {\n \
\ \"acc_norm,none\": 0.37037037037037035,\n \"acc_norm_stderr,none\"\
: 0.017183843300707654,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"acc_norm,none\"\
: 0.504,\n \"acc_norm_stderr,none\": 0.031685198551199154,\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\"\n },\n \"\
leaderboard_musr_object_placements\": {\n \"acc_norm,none\": 0.25,\n\
\ \"acc_norm_stderr,none\": 0.02711630722733202,\n \"alias\"\
: \" - leaderboard_musr_object_placements\"\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"acc_norm,none\": 0.36,\n \"acc_norm_stderr,none\"\
: 0.03041876402517498,\n \"alias\": \" - leaderboard_musr_team_allocation\"\
\n }\n },\n \"leaderboard\": {\n \"inst_level_loose_acc,none\"\
: 0.8369304556354916,\n \"inst_level_loose_acc_stderr,none\": \"N/A\",\n\
\ \"exact_match,none\": 0.08610271903323263,\n \"exact_match_stderr,none\"\
: 0.007549162688514089,\n \"acc,none\": 0.36826795212765956,\n \"\
acc_stderr,none\": 0.0043974160240703485,\n \"acc_norm,none\": 0.4509015436502789,\n\
\ \"acc_norm_stderr,none\": 0.005347396617859254,\n \"prompt_level_strict_acc,none\"\
: 0.6709796672828097,\n \"prompt_level_strict_acc_stderr,none\": 0.020219442118538433,\n\
\ \"inst_level_strict_acc,none\": 0.7553956834532374,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.7726432532347505,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.01803626267364009,\n \"alias\"\
: \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\"\
: 0.5011282763409131,\n \"acc_norm_stderr,none\": 0.0062614921080603415,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"acc_norm,none\": 0.788,\n \"acc_norm_stderr,none\": 0.025901884690541156,\n\
\ \"alias\": \" - leaderboard_bbh_boolean_expressions\"\n },\n \"\
leaderboard_bbh_causal_judgement\": {\n \"acc_norm,none\": 0.5882352941176471,\n\
\ \"acc_norm_stderr,none\": 0.03608640563085621,\n \"alias\": \" \
\ - leaderboard_bbh_causal_judgement\"\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"acc_norm,none\": 0.464,\n \"acc_norm_stderr,none\": 0.031603975145223735,\n\
\ \"alias\": \" - leaderboard_bbh_date_understanding\"\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"acc_norm,none\": 0.628,\n \"acc_norm_stderr,none\": 0.030630325944558317,\n\
\ \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\n },\n \"leaderboard_bbh_formal_fallacies\"\
: {\n \"acc_norm,none\": 0.588,\n \"acc_norm_stderr,none\": 0.031191596026022894,\n\
\ \"alias\": \" - leaderboard_bbh_formal_fallacies\"\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"acc_norm,none\": 0.308,\n \"acc_norm_stderr,none\": 0.029256928606501864,\n\
\ \"alias\": \" - leaderboard_bbh_geometric_shapes\"\n },\n \"leaderboard_bbh_hyperbaton\"\
: {\n \"acc_norm,none\": 0.568,\n \"acc_norm_stderr,none\": 0.03139181076542941,\n\
\ \"alias\": \" - leaderboard_bbh_hyperbaton\"\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"acc_norm,none\": 0.388,\n \"acc_norm_stderr,none\": 0.03088103874899391,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\n \
\ },\n \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"acc_norm,none\"\
: 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254709,\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_seven_objects\"\n },\n \"leaderboard_bbh_logical_deduction_three_objects\"\
: {\n \"acc_norm,none\": 0.556,\n \"acc_norm_stderr,none\": 0.03148684942554574,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\n \
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"acc_norm,none\"\
: 0.528,\n \"acc_norm_stderr,none\": 0.0316364895315444,\n \"alias\"\
: \" - leaderboard_bbh_movie_recommendation\"\n },\n \"leaderboard_bbh_navigate\"\
: {\n \"acc_norm,none\": 0.552,\n \"acc_norm_stderr,none\": 0.031514387611153515,\n\
\ \"alias\": \" - leaderboard_bbh_navigate\"\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"acc_norm,none\": 0.38,\n \"acc_norm_stderr,none\": 0.030760116042626042,\n\
\ \"alias\": \" - leaderboard_bbh_object_counting\"\n },\n \"leaderboard_bbh_penguins_in_a_table\"\
: {\n \"acc_norm,none\": 0.5342465753424658,\n \"acc_norm_stderr,none\"\
: 0.04142522736934775,\n \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\
\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \"\
acc_norm,none\": 0.648,\n \"acc_norm_stderr,none\": 0.030266288057359942,\n\
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\n \
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"acc_norm,none\": 0.716,\n\
\ \"acc_norm_stderr,none\": 0.0285769587304374,\n \"alias\": \" -\
\ leaderboard_bbh_ruin_names\"\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"acc_norm,none\": 0.476,\n \"acc_norm_stderr,none\": 0.03164968895968781,\n\
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"acc_norm,none\": 0.4943820224719101,\n\
\ \"acc_norm_stderr,none\": 0.03757992900475981,\n \"alias\": \" \
\ - leaderboard_bbh_snarks\"\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"acc_norm,none\": 0.796,\n \"acc_norm_stderr,none\": 0.02553712157454814,\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\"\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"acc_norm,none\": 0.304,\n \
\ \"acc_norm_stderr,none\": 0.029150213374159673,\n \"alias\": \" -\
\ leaderboard_bbh_temporal_sequences\"\n },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
: {\n \"acc_norm,none\": 0.188,\n \"acc_norm_stderr,none\": 0.024760377727750495,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"acc_norm,none\": 0.212,\n \"acc_norm_stderr,none\": 0.02590188469054116,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"acc_norm,none\": 0.392,\n \"acc_norm_stderr,none\": 0.0309382076204012,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
\n },\n \"leaderboard_bbh_web_of_lies\": {\n \"acc_norm,none\": 0.476,\n\
\ \"acc_norm_stderr,none\": 0.03164968895968782,\n \"alias\": \" \
\ - leaderboard_bbh_web_of_lies\"\n },\n \"leaderboard_gpqa\": {\n \
\ \"acc_norm,none\": 0.25922818791946306,\n \"acc_norm_stderr,none\": 0.01270453266853576,\n\
\ \"alias\": \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\"\
: {\n \"acc_norm,none\": 0.26262626262626265,\n \"acc_norm_stderr,none\"\
: 0.031353050095330855,\n \"alias\": \" - leaderboard_gpqa_diamond\"\n \
\ },\n \"leaderboard_gpqa_extended\": {\n \"acc_norm,none\": 0.2692307692307692,\n\
\ \"acc_norm_stderr,none\": 0.019000027142915377,\n \"alias\": \"\
\ - leaderboard_gpqa_extended\"\n },\n \"leaderboard_gpqa_main\": {\n \
\ \"acc_norm,none\": 0.24553571428571427,\n \"acc_norm_stderr,none\"\
: 0.020357428454484575,\n \"alias\": \" - leaderboard_gpqa_main\"\n },\n\
\ \"leaderboard_ifeval\": {\n \"prompt_level_strict_acc,none\": 0.6709796672828097,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.020219442118538433,\n \
\ \"inst_level_strict_acc,none\": 0.7553956834532374,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.7726432532347505,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.01803626267364009,\n \"inst_level_loose_acc,none\"\
: 0.8369304556354916,\n \"inst_level_loose_acc_stderr,none\": \"N/A\",\n\
\ \"alias\": \" - leaderboard_ifeval\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.08610271903323263,\n \"exact_match_stderr,none\"\
: 0.007549162688514089,\n \"alias\": \" - leaderboard_math_hard\"\n },\n\
\ \"leaderboard_math_algebra_hard\": {\n \"exact_match,none\": 0.13029315960912052,\n\
\ \"exact_match_stderr,none\": 0.01924360959782677,\n \"alias\": \"\
\ - leaderboard_math_algebra_hard\"\n },\n \"leaderboard_math_counting_and_prob_hard\"\
: {\n \"exact_match,none\": 0.04878048780487805,\n \"exact_match_stderr,none\"\
: 0.01950219655858807,\n \"alias\": \" - leaderboard_math_counting_and_prob_hard\"\
\n },\n \"leaderboard_math_geometry_hard\": {\n \"exact_match,none\"\
: 0.022727272727272728,\n \"exact_match_stderr,none\": 0.013021046909063713,\n\
\ \"alias\": \" - leaderboard_math_geometry_hard\"\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"exact_match,none\": 0.03214285714285714,\n \"exact_match_stderr,none\"\
: 0.010559558661753198,\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\"\
\n },\n \"leaderboard_math_num_theory_hard\": {\n \"exact_match,none\"\
: 0.08441558441558442,\n \"exact_match_stderr,none\": 0.02247578123186701,\n\
\ \"alias\": \" - leaderboard_math_num_theory_hard\"\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"exact_match,none\": 0.19689119170984457,\n \"exact_match_stderr,none\"\
: 0.028697873971860695,\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
\n },\n \"leaderboard_math_precalculus_hard\": {\n \"exact_match,none\"\
: 0.037037037037037035,\n \"exact_match_stderr,none\": 0.016314377626726054,\n\
\ \"alias\": \" - leaderboard_math_precalculus_hard\"\n },\n \"leaderboard_mmlu_pro\"\
: {\n \"acc,none\": 0.36826795212765956,\n \"acc_stderr,none\": 0.0043974160240703485,\n\
\ \"alias\": \" - leaderboard_mmlu_pro\"\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.37037037037037035,\n \"acc_norm_stderr,none\"\
: 0.017183843300707654,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"acc_norm,none\": 0.504,\n\
\ \"acc_norm_stderr,none\": 0.031685198551199154,\n \"alias\": \"\
\ - leaderboard_musr_murder_mysteries\"\n },\n \"leaderboard_musr_object_placements\"\
: {\n \"acc_norm,none\": 0.25,\n \"acc_norm_stderr,none\": 0.02711630722733202,\n\
\ \"alias\": \" - leaderboard_musr_object_placements\"\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"acc_norm,none\": 0.36,\n \"acc_norm_stderr,none\": 0.03041876402517498,\n\
\ \"alias\": \" - leaderboard_musr_team_allocation\"\n }\n}\n```"
repo_url: https://huggingface.co/yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_date_understanding
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_navigate
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_navigate_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_object_counting
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_object_counting_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_ruin_names
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_snarks
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_snarks_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_gpqa_diamond
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_gpqa_diamond_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_gpqa_extended
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_gpqa_extended_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_gpqa_main
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_gpqa_main_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_ifeval
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_ifeval_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_math_algebra_hard
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_math_algebra_hard_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_math_geometry_hard
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_math_geometry_hard_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_math_num_theory_hard
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_math_precalculus_hard
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_mmlu_pro
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_mmlu_pro_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_musr_object_placements
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_musr_object_placements_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-09-29T15-24-35.214199.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_musr_team_allocation
data_files:
- split: 2024_09_29T15_24_35.214199
path:
- '**/samples_leaderboard_musr_team_allocation_2024-09-29T15-24-35.214199.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-09-29T15-24-35.214199.jsonl'
---
# Dataset Card for Evaluation run of yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table](https://huggingface.co/yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table)
The dataset is composed of 38 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the most recent results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset

# Pull the per-sample details for one evaluated task; split="latest"
# always resolves to the most recent run.
data = load_dataset(
"open-llm-leaderboard/yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table-details",
name="yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
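Once loaded, the split behaves like any other `datasets.Dataset`. The per-sample fields vary from task to task, so a minimal sketch (assuming only the standard `datasets` API, not any particular schema) is to list the columns and print the first record before relying on any field names:
```python
from datasets import load_dataset

# Same call as above: per-sample details for one task, latest run.
data = load_dataset(
    "open-llm-leaderboard/yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table-details",
    name="yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table__leaderboard_bbh_boolean_expressions",
    split="latest",
)

# The schema differs per task, so inspect it before assuming field names.
print(data.column_names)  # column names available for this task
print(data[0])            # the first evaluated sample as a dict
```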
## Latest results
These are the [latest results from run 2024-09-29T15-24-35.214199](https://huggingface.co/datasets/open-llm-leaderboard/yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table-details/blob/main/yfzp__Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table/results_2024-09-29T15-24-35.214199.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split for each eval):
```python
{
"all": {
"leaderboard": {
"inst_level_loose_acc,none": 0.8369304556354916,
"inst_level_loose_acc_stderr,none": "N/A",
"exact_match,none": 0.08610271903323263,
"exact_match_stderr,none": 0.007549162688514089,
"acc,none": 0.36826795212765956,
"acc_stderr,none": 0.0043974160240703485,
"acc_norm,none": 0.4509015436502789,
"acc_norm_stderr,none": 0.005347396617859254,
"prompt_level_strict_acc,none": 0.6709796672828097,
"prompt_level_strict_acc_stderr,none": 0.020219442118538433,
"inst_level_strict_acc,none": 0.7553956834532374,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.7726432532347505,
"prompt_level_loose_acc_stderr,none": 0.01803626267364009,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5011282763409131,
"acc_norm_stderr,none": 0.0062614921080603415,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"acc_norm,none": 0.788,
"acc_norm_stderr,none": 0.025901884690541156,
"alias": " - leaderboard_bbh_boolean_expressions"
},
"leaderboard_bbh_causal_judgement": {
"acc_norm,none": 0.5882352941176471,
"acc_norm_stderr,none": 0.03608640563085621,
"alias": " - leaderboard_bbh_causal_judgement"
},
"leaderboard_bbh_date_understanding": {
"acc_norm,none": 0.464,
"acc_norm_stderr,none": 0.031603975145223735,
"alias": " - leaderboard_bbh_date_understanding"
},
"leaderboard_bbh_disambiguation_qa": {
"acc_norm,none": 0.628,
"acc_norm_stderr,none": 0.030630325944558317,
"alias": " - leaderboard_bbh_disambiguation_qa"
},
"leaderboard_bbh_formal_fallacies": {
"acc_norm,none": 0.588,
"acc_norm_stderr,none": 0.031191596026022894,
"alias": " - leaderboard_bbh_formal_fallacies"
},
"leaderboard_bbh_geometric_shapes": {
"acc_norm,none": 0.308,
"acc_norm_stderr,none": 0.029256928606501864,
"alias": " - leaderboard_bbh_geometric_shapes"
},
"leaderboard_bbh_hyperbaton": {
"acc_norm,none": 0.568,
"acc_norm_stderr,none": 0.03139181076542941,
"alias": " - leaderboard_bbh_hyperbaton"
},
"leaderboard_bbh_logical_deduction_five_objects": {
"acc_norm,none": 0.388,
"acc_norm_stderr,none": 0.03088103874899391,
"alias": " - leaderboard_bbh_logical_deduction_five_objects"
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254709,
"alias": " - leaderboard_bbh_logical_deduction_seven_objects"
},
"leaderboard_bbh_logical_deduction_three_objects": {
"acc_norm,none": 0.556,
"acc_norm_stderr,none": 0.03148684942554574,
"alias": " - leaderboard_bbh_logical_deduction_three_objects"
},
"leaderboard_bbh_movie_recommendation": {
"acc_norm,none": 0.528,
"acc_norm_stderr,none": 0.0316364895315444,
"alias": " - leaderboard_bbh_movie_recommendation"
},
"leaderboard_bbh_navigate": {
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.031514387611153515,
"alias": " - leaderboard_bbh_navigate"
},
"leaderboard_bbh_object_counting": {
"acc_norm,none": 0.38,
"acc_norm_stderr,none": 0.030760116042626042,
"alias": " - leaderboard_bbh_object_counting"
},
"leaderboard_bbh_penguins_in_a_table": {
"acc_norm,none": 0.5342465753424658,
"acc_norm_stderr,none": 0.04142522736934775,
"alias": " - leaderboard_bbh_penguins_in_a_table"
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"acc_norm,none": 0.648,
"acc_norm_stderr,none": 0.030266288057359942,
"alias": " - leaderboard_bbh_reasoning_about_colored_objects"
},
"leaderboard_bbh_ruin_names": {
"acc_norm,none": 0.716,
"acc_norm_stderr,none": 0.0285769587304374,
"alias": " - leaderboard_bbh_ruin_names"
},
"leaderboard_bbh_salient_translation_error_detection": {
"acc_norm,none": 0.476,
"acc_norm_stderr,none": 0.03164968895968781,
"alias": " - leaderboard_bbh_salient_translation_error_detection"
},
"leaderboard_bbh_snarks": {
"acc_norm,none": 0.4943820224719101,
"acc_norm_stderr,none": 0.03757992900475981,
"alias": " - leaderboard_bbh_snarks"
},
"leaderboard_bbh_sports_understanding": {
"acc_norm,none": 0.796,
"acc_norm_stderr,none": 0.02553712157454814,
"alias": " - leaderboard_bbh_sports_understanding"
},
"leaderboard_bbh_temporal_sequences": {
"acc_norm,none": 0.304,
"acc_norm_stderr,none": 0.029150213374159673,
"alias": " - leaderboard_bbh_temporal_sequences"
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"acc_norm,none": 0.188,
"acc_norm_stderr,none": 0.024760377727750495,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"acc_norm,none": 0.212,
"acc_norm_stderr,none": 0.02590188469054116,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"acc_norm,none": 0.392,
"acc_norm_stderr,none": 0.0309382076204012,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects"
},
"leaderboard_bbh_web_of_lies": {
"acc_norm,none": 0.476,
"acc_norm_stderr,none": 0.03164968895968782,
"alias": " - leaderboard_bbh_web_of_lies"
},
"leaderboard_gpqa": {
"acc_norm,none": 0.25922818791946306,
"acc_norm_stderr,none": 0.01270453266853576,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"acc_norm,none": 0.26262626262626265,
"acc_norm_stderr,none": 0.031353050095330855,
"alias": " - leaderboard_gpqa_diamond"
},
"leaderboard_gpqa_extended": {
"acc_norm,none": 0.2692307692307692,
"acc_norm_stderr,none": 0.019000027142915377,
"alias": " - leaderboard_gpqa_extended"
},
"leaderboard_gpqa_main": {
"acc_norm,none": 0.24553571428571427,
"acc_norm_stderr,none": 0.020357428454484575,
"alias": " - leaderboard_gpqa_main"
},
"leaderboard_ifeval": {
"prompt_level_strict_acc,none": 0.6709796672828097,
"prompt_level_strict_acc_stderr,none": 0.020219442118538433,
"inst_level_strict_acc,none": 0.7553956834532374,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.7726432532347505,
"prompt_level_loose_acc_stderr,none": 0.01803626267364009,
"inst_level_loose_acc,none": 0.8369304556354916,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": " - leaderboard_ifeval"
},
"leaderboard_math_hard": {
"exact_match,none": 0.08610271903323263,
"exact_match_stderr,none": 0.007549162688514089,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"exact_match,none": 0.13029315960912052,
"exact_match_stderr,none": 0.01924360959782677,
"alias": " - leaderboard_math_algebra_hard"
},
"leaderboard_math_counting_and_prob_hard": {
"exact_match,none": 0.04878048780487805,
"exact_match_stderr,none": 0.01950219655858807,
"alias": " - leaderboard_math_counting_and_prob_hard"
},
"leaderboard_math_geometry_hard": {
"exact_match,none": 0.022727272727272728,
"exact_match_stderr,none": 0.013021046909063713,
"alias": " - leaderboard_math_geometry_hard"
},
"leaderboard_math_intermediate_algebra_hard": {
"exact_match,none": 0.03214285714285714,
"exact_match_stderr,none": 0.010559558661753198,
"alias": " - leaderboard_math_intermediate_algebra_hard"
},
"leaderboard_math_num_theory_hard": {
"exact_match,none": 0.08441558441558442,
"exact_match_stderr,none": 0.02247578123186701,
"alias": " - leaderboard_math_num_theory_hard"
},
"leaderboard_math_prealgebra_hard": {
"exact_match,none": 0.19689119170984457,
"exact_match_stderr,none": 0.028697873971860695,
"alias": " - leaderboard_math_prealgebra_hard"
},
"leaderboard_math_precalculus_hard": {
"exact_match,none": 0.037037037037037035,
"exact_match_stderr,none": 0.016314377626726054,
"alias": " - leaderboard_math_precalculus_hard"
},
"leaderboard_mmlu_pro": {
"acc,none": 0.36826795212765956,
"acc_stderr,none": 0.0043974160240703485,
"alias": " - leaderboard_mmlu_pro"
},
"leaderboard_musr": {
"acc_norm,none": 0.37037037037037035,
"acc_norm_stderr,none": 0.017183843300707654,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"acc_norm,none": 0.504,
"acc_norm_stderr,none": 0.031685198551199154,
"alias": " - leaderboard_musr_murder_mysteries"
},
"leaderboard_musr_object_placements": {
"acc_norm,none": 0.25,
"acc_norm_stderr,none": 0.02711630722733202,
"alias": " - leaderboard_musr_object_placements"
},
"leaderboard_musr_team_allocation": {
"acc_norm,none": 0.36,
"acc_norm_stderr,none": 0.03041876402517498,
"alias": " - leaderboard_musr_team_allocation"
}
},
"leaderboard": {
"inst_level_loose_acc,none": 0.8369304556354916,
"inst_level_loose_acc_stderr,none": "N/A",
"exact_match,none": 0.08610271903323263,
"exact_match_stderr,none": 0.007549162688514089,
"acc,none": 0.36826795212765956,
"acc_stderr,none": 0.0043974160240703485,
"acc_norm,none": 0.4509015436502789,
"acc_norm_stderr,none": 0.005347396617859254,
"prompt_level_strict_acc,none": 0.6709796672828097,
"prompt_level_strict_acc_stderr,none": 0.020219442118538433,
"inst_level_strict_acc,none": 0.7553956834532374,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.7726432532347505,
"prompt_level_loose_acc_stderr,none": 0.01803626267364009,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5011282763409131,
"acc_norm_stderr,none": 0.0062614921080603415,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"acc_norm,none": 0.788,
"acc_norm_stderr,none": 0.025901884690541156,
"alias": " - leaderboard_bbh_boolean_expressions"
},
"leaderboard_bbh_causal_judgement": {
"acc_norm,none": 0.5882352941176471,
"acc_norm_stderr,none": 0.03608640563085621,
"alias": " - leaderboard_bbh_causal_judgement"
},
"leaderboard_bbh_date_understanding": {
"acc_norm,none": 0.464,
"acc_norm_stderr,none": 0.031603975145223735,
"alias": " - leaderboard_bbh_date_understanding"
},
"leaderboard_bbh_disambiguation_qa": {
"acc_norm,none": 0.628,
"acc_norm_stderr,none": 0.030630325944558317,
"alias": " - leaderboard_bbh_disambiguation_qa"
},
"leaderboard_bbh_formal_fallacies": {
"acc_norm,none": 0.588,
"acc_norm_stderr,none": 0.031191596026022894,
"alias": " - leaderboard_bbh_formal_fallacies"
},
"leaderboard_bbh_geometric_shapes": {
"acc_norm,none": 0.308,
"acc_norm_stderr,none": 0.029256928606501864,
"alias": " - leaderboard_bbh_geometric_shapes"
},
"leaderboard_bbh_hyperbaton": {
"acc_norm,none": 0.568,
"acc_norm_stderr,none": 0.03139181076542941,
"alias": " - leaderboard_bbh_hyperbaton"
},
"leaderboard_bbh_logical_deduction_five_objects": {
"acc_norm,none": 0.388,
"acc_norm_stderr,none": 0.03088103874899391,
"alias": " - leaderboard_bbh_logical_deduction_five_objects"
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254709,
"alias": " - leaderboard_bbh_logical_deduction_seven_objects"
},
"leaderboard_bbh_logical_deduction_three_objects": {
"acc_norm,none": 0.556,
"acc_norm_stderr,none": 0.03148684942554574,
"alias": " - leaderboard_bbh_logical_deduction_three_objects"
},
"leaderboard_bbh_movie_recommendation": {
"acc_norm,none": 0.528,
"acc_norm_stderr,none": 0.0316364895315444,
"alias": " - leaderboard_bbh_movie_recommendation"
},
"leaderboard_bbh_navigate": {
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.031514387611153515,
"alias": " - leaderboard_bbh_navigate"
},
"leaderboard_bbh_object_counting": {
"acc_norm,none": 0.38,
"acc_norm_stderr,none": 0.030760116042626042,
"alias": " - leaderboard_bbh_object_counting"
},
"leaderboard_bbh_penguins_in_a_table": {
"acc_norm,none": 0.5342465753424658,
"acc_norm_stderr,none": 0.04142522736934775,
"alias": " - leaderboard_bbh_penguins_in_a_table"
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"acc_norm,none": 0.648,
"acc_norm_stderr,none": 0.030266288057359942,
"alias": " - leaderboard_bbh_reasoning_about_colored_objects"
},
"leaderboard_bbh_ruin_names": {
"acc_norm,none": 0.716,
"acc_norm_stderr,none": 0.0285769587304374,
"alias": " - leaderboard_bbh_ruin_names"
},
"leaderboard_bbh_salient_translation_error_detection": {
"acc_norm,none": 0.476,
"acc_norm_stderr,none": 0.03164968895968781,
"alias": " - leaderboard_bbh_salient_translation_error_detection"
},
"leaderboard_bbh_snarks": {
"acc_norm,none": 0.4943820224719101,
"acc_norm_stderr,none": 0.03757992900475981,
"alias": " - leaderboard_bbh_snarks"
},
"leaderboard_bbh_sports_understanding": {
"acc_norm,none": 0.796,
"acc_norm_stderr,none": 0.02553712157454814,
"alias": " - leaderboard_bbh_sports_understanding"
},
"leaderboard_bbh_temporal_sequences": {
"acc_norm,none": 0.304,
"acc_norm_stderr,none": 0.029150213374159673,
"alias": " - leaderboard_bbh_temporal_sequences"
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"acc_norm,none": 0.188,
"acc_norm_stderr,none": 0.024760377727750495,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"acc_norm,none": 0.212,
"acc_norm_stderr,none": 0.02590188469054116,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"acc_norm,none": 0.392,
"acc_norm_stderr,none": 0.0309382076204012,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects"
},
"leaderboard_bbh_web_of_lies": {
"acc_norm,none": 0.476,
"acc_norm_stderr,none": 0.03164968895968782,
"alias": " - leaderboard_bbh_web_of_lies"
},
"leaderboard_gpqa": {
"acc_norm,none": 0.25922818791946306,
"acc_norm_stderr,none": 0.01270453266853576,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"acc_norm,none": 0.26262626262626265,
"acc_norm_stderr,none": 0.031353050095330855,
"alias": " - leaderboard_gpqa_diamond"
},
"leaderboard_gpqa_extended": {
"acc_norm,none": 0.2692307692307692,
"acc_norm_stderr,none": 0.019000027142915377,
"alias": " - leaderboard_gpqa_extended"
},
"leaderboard_gpqa_main": {
"acc_norm,none": 0.24553571428571427,
"acc_norm_stderr,none": 0.020357428454484575,
"alias": " - leaderboard_gpqa_main"
},
"leaderboard_ifeval": {
"prompt_level_strict_acc,none": 0.6709796672828097,
"prompt_level_strict_acc_stderr,none": 0.020219442118538433,
"inst_level_strict_acc,none": 0.7553956834532374,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.7726432532347505,
"prompt_level_loose_acc_stderr,none": 0.01803626267364009,
"inst_level_loose_acc,none": 0.8369304556354916,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": " - leaderboard_ifeval"
},
"leaderboard_math_hard": {
"exact_match,none": 0.08610271903323263,
"exact_match_stderr,none": 0.007549162688514089,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"exact_match,none": 0.13029315960912052,
"exact_match_stderr,none": 0.01924360959782677,
"alias": " - leaderboard_math_algebra_hard"
},
"leaderboard_math_counting_and_prob_hard": {
"exact_match,none": 0.04878048780487805,
"exact_match_stderr,none": 0.01950219655858807,
"alias": " - leaderboard_math_counting_and_prob_hard"
},
"leaderboard_math_geometry_hard": {
"exact_match,none": 0.022727272727272728,
"exact_match_stderr,none": 0.013021046909063713,
"alias": " - leaderboard_math_geometry_hard"
},
"leaderboard_math_intermediate_algebra_hard": {
"exact_match,none": 0.03214285714285714,
"exact_match_stderr,none": 0.010559558661753198,
"alias": " - leaderboard_math_intermediate_algebra_hard"
},
"leaderboard_math_num_theory_hard": {
"exact_match,none": 0.08441558441558442,
"exact_match_stderr,none": 0.02247578123186701,
"alias": " - leaderboard_math_num_theory_hard"
},
"leaderboard_math_prealgebra_hard": {
"exact_match,none": 0.19689119170984457,
"exact_match_stderr,none": 0.028697873971860695,
"alias": " - leaderboard_math_prealgebra_hard"
},
"leaderboard_math_precalculus_hard": {
"exact_match,none": 0.037037037037037035,
"exact_match_stderr,none": 0.016314377626726054,
"alias": " - leaderboard_math_precalculus_hard"
},
"leaderboard_mmlu_pro": {
"acc,none": 0.36826795212765956,
"acc_stderr,none": 0.0043974160240703485,
"alias": " - leaderboard_mmlu_pro"
},
"leaderboard_musr": {
"acc_norm,none": 0.37037037037037035,
"acc_norm_stderr,none": 0.017183843300707654,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"acc_norm,none": 0.504,
"acc_norm_stderr,none": 0.031685198551199154,
"alias": " - leaderboard_musr_murder_mysteries"
},
"leaderboard_musr_object_placements": {
"acc_norm,none": 0.25,
"acc_norm_stderr,none": 0.02711630722733202,
"alias": " - leaderboard_musr_object_placements"
},
"leaderboard_musr_team_allocation": {
"acc_norm,none": 0.36,
"acc_norm_stderr,none": 0.03041876402517498,
"alias": " - leaderboard_musr_team_allocation"
}
}
```
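The aggregated JSON above can be post-processed programmatically; as a minimal sketch (the local `results.json` path is illustrative, not part of this repo), the per-task headline metrics can be flattened like so:
```python
import json

# Path is illustrative; save the aggregated results block above to disk first.
with open("results.json") as f:
    results = json.load(f)

# Each task entry maps metric keys such as "acc_norm,none" to values,
# with matching "*_stderr,none" keys and an "alias" label.
for task, metrics in results.items():
    if task == "all" or not isinstance(metrics, dict):
        continue  # skip the nested "all" copy of the same numbers
    for key, value in metrics.items():
        if key.endswith(",none") and "stderr" not in key:
            print(f"{task:55s} {key.split(',')[0]:30s} {value}")
```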
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
PurpleAILAB/chatML_SQL_injection_dataset | PurpleAILAB | "2024-09-29T15:33:17Z" | 0 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T15:30:05Z" | ---
dataset_info:
features:
- name: conversations
struct:
- name: from
dtype: string
- name: value
dtype: string
splits:
- name: train
num_bytes: 417230
num_examples: 3992
download_size: 57358
dataset_size: 417230
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
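A minimal usage sketch based on the schema declared above; beyond the `from`/`value` fields of the `conversations` struct, nothing about the contents is assumed:
```python
from datasets import load_dataset

# Load the single train split declared in the configs above.
ds = load_dataset("PurpleAILAB/chatML_SQL_injection_dataset", split="train")

# Each row carries a `conversations` struct with `from` and `value` fields.
turn = ds[0]["conversations"]
print(turn["from"])
print(turn["value"])
```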
|
PurpleAILAB/alpaca_SQL_injection_dataset | PurpleAILAB | "2024-09-29T15:30:08Z" | 0 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T15:30:07Z" | ---
dataset_info:
features:
- name: instruction
dtype: string
- name: input
dtype: string
- name: output
dtype: string
splits:
- name: train
num_bytes: 393278
num_examples: 1996
download_size: 55184
dataset_size: 393278
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
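A minimal sketch of rendering rows into an Alpaca-style prompt string; the template wording below is an assumption for illustration, not part of the dataset:
```python
from datasets import load_dataset

ds = load_dataset("PurpleAILAB/alpaca_SQL_injection_dataset", split="train")

def to_prompt(row: dict) -> str:
    # Conventional Alpaca layout; the exact wording is an assumption.
    parts = [f"### Instruction:\n{row['instruction']}"]
    if row["input"]:
        parts.append(f"### Input:\n{row['input']}")
    parts.append(f"### Response:\n{row['output']}")
    return "\n\n".join(parts)

print(to_prompt(ds[0]))
```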
|
open-llm-leaderboard/yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002-details | open-llm-leaderboard | "2024-09-29T15:35:31Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T15:31:51Z" | ---
pretty_name: Evaluation run of yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002](https://huggingface.co/yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002-details\"\
,\n\tname=\"yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-09-29T15-31-50.382131](https://huggingface.co/datasets/open-llm-leaderboard/yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002-details/blob/main/yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002/results_2024-09-29T15-31-50.382131.json)\
\ (note that there might be results for other tasks in the repo if successive evals\
\ didn't cover the same tasks; you can find each one in the results and in the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"inst_level_strict_acc,none\": 0.7470023980815348,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.7652495378927912,\n \"prompt_level_loose_acc_stderr,none\": 0.01823928821343378,\n\
\ \"inst_level_loose_acc,none\": 0.8357314148681055,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\",\n \"acc,none\": 0.366938164893617,\n\
\ \"acc_stderr,none\": 0.00439408693623272,\n \"prompt_level_strict_acc,none\"\
: 0.6561922365988909,\n \"prompt_level_strict_acc_stderr,none\": 0.020439793487859976,\n\
\ \"exact_match,none\": 0.06570996978851963,\n \"exact_match_stderr,none\"\
: 0.0066807029923575765,\n \"acc_norm,none\": 0.44882604747697497,\n\
\ \"acc_norm_stderr,none\": 0.005342481492488792,\n \"alias\"\
: \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\"\
: 0.4974830758548863,\n \"acc_norm_stderr,none\": 0.006249798665250531,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"acc_norm,none\": 0.788,\n \"acc_norm_stderr,none\"\
: 0.025901884690541156,\n \"alias\": \" - leaderboard_bbh_boolean_expressions\"\
\n },\n \"leaderboard_bbh_causal_judgement\": {\n \"acc_norm,none\"\
: 0.5882352941176471,\n \"acc_norm_stderr,none\": 0.03608640563085621,\n\
\ \"alias\": \" - leaderboard_bbh_causal_judgement\"\n },\n \
\ \"leaderboard_bbh_date_understanding\": {\n \"acc_norm,none\"\
: 0.484,\n \"acc_norm_stderr,none\": 0.031669985030107414,\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\"\n },\n \
\ \"leaderboard_bbh_disambiguation_qa\": {\n \"acc_norm,none\": 0.624,\n\
\ \"acc_norm_stderr,none\": 0.03069633626739458,\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\"\n },\n \"leaderboard_bbh_formal_fallacies\"\
: {\n \"acc_norm,none\": 0.568,\n \"acc_norm_stderr,none\"\
: 0.03139181076542941,\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
\n },\n \"leaderboard_bbh_geometric_shapes\": {\n \"acc_norm,none\"\
: 0.304,\n \"acc_norm_stderr,none\": 0.029150213374159673,\n \
\ \"alias\": \" - leaderboard_bbh_geometric_shapes\"\n },\n \"\
leaderboard_bbh_hyperbaton\": {\n \"acc_norm,none\": 0.592,\n \
\ \"acc_norm_stderr,none\": 0.03114520984654849,\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\"\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"acc_norm,none\": 0.392,\n \"acc_norm_stderr,none\"\
: 0.030938207620401195,\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \
\ \"acc_norm,none\": 0.464,\n \"acc_norm_stderr,none\": 0.03160397514522373,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"acc_norm,none\": 0.552,\n \"acc_norm_stderr,none\": 0.031514387611153515,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\": 0.031669985030107414,\n\
\ \"alias\": \" - leaderboard_bbh_movie_recommendation\"\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"acc_norm,none\": 0.536,\n\
\ \"acc_norm_stderr,none\": 0.03160397514522375,\n \"alias\"\
: \" - leaderboard_bbh_navigate\"\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"acc_norm,none\": 0.372,\n \"acc_norm_stderr,none\"\
: 0.030630325944558313,\n \"alias\": \" - leaderboard_bbh_object_counting\"\
\n },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
acc_norm,none\": 0.541095890410959,\n \"acc_norm_stderr,none\": 0.041382249050673066,\n\
\ \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\n },\n\
\ \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \"\
acc_norm,none\": 0.652,\n \"acc_norm_stderr,none\": 0.0301865684645117,\n\
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"acc_norm,none\"\
: 0.712,\n \"acc_norm_stderr,none\": 0.02869700458739821,\n \
\ \"alias\": \" - leaderboard_bbh_ruin_names\"\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"acc_norm,none\": 0.48,\n \"acc_norm_stderr,none\"\
: 0.031660853408495185,\n \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\
\n },\n \"leaderboard_bbh_snarks\": {\n \"acc_norm,none\"\
: 0.4943820224719101,\n \"acc_norm_stderr,none\": 0.03757992900475981,\n\
\ \"alias\": \" - leaderboard_bbh_snarks\"\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"acc_norm,none\": 0.796,\n \"acc_norm_stderr,none\"\
: 0.02553712157454814,\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
\n },\n \"leaderboard_bbh_temporal_sequences\": {\n \"\
acc_norm,none\": 0.272,\n \"acc_norm_stderr,none\": 0.02820008829631,\n\
\ \"alias\": \" - leaderboard_bbh_temporal_sequences\"\n },\n\
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"acc_norm,none\": 0.184,\n \"acc_norm_stderr,none\": 0.02455581299422256,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"acc_norm,none\": 0.212,\n \"acc_norm_stderr,none\"\
: 0.02590188469054116,\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"acc_norm,none\": 0.376,\n \"acc_norm_stderr,none\"\
: 0.030696336267394583,\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
\n },\n \"leaderboard_bbh_web_of_lies\": {\n \"acc_norm,none\"\
: 0.48,\n \"acc_norm_stderr,none\": 0.031660853408495185,\n \
\ \"alias\": \" - leaderboard_bbh_web_of_lies\"\n },\n \"leaderboard_gpqa\"\
: {\n \"acc_norm,none\": 0.25922818791946306,\n \"acc_norm_stderr,none\"\
: 0.01270453266853576,\n \"alias\": \" - leaderboard_gpqa\"\n \
\ },\n \"leaderboard_gpqa_diamond\": {\n \"acc_norm,none\": 0.26262626262626265,\n\
\ \"acc_norm_stderr,none\": 0.031353050095330855,\n \"alias\"\
: \" - leaderboard_gpqa_diamond\"\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"acc_norm,none\": 0.2692307692307692,\n \"acc_norm_stderr,none\"\
: 0.019000027142915377,\n \"alias\": \" - leaderboard_gpqa_extended\"\
\n },\n \"leaderboard_gpqa_main\": {\n \"acc_norm,none\"\
: 0.24553571428571427,\n \"acc_norm_stderr,none\": 0.020357428454484575,\n\
\ \"alias\": \" - leaderboard_gpqa_main\"\n },\n \"leaderboard_ifeval\"\
: {\n \"prompt_level_strict_acc,none\": 0.6561922365988909,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.020439793487859976,\n \
\ \"inst_level_strict_acc,none\": 0.7470023980815348,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.7652495378927912,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.01823928821343378,\n \
\ \"inst_level_loose_acc,none\": 0.8357314148681055,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"alias\": \" - leaderboard_ifeval\"\n },\n \
\ \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.06570996978851963,\n\
\ \"exact_match_stderr,none\": 0.006680702992357577,\n \"\
alias\": \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\"\
: {\n \"exact_match,none\": 0.10423452768729642,\n \"exact_match_stderr,none\"\
: 0.017467962356999933,\n \"alias\": \" - leaderboard_math_algebra_hard\"\
\n },\n \"leaderboard_math_counting_and_prob_hard\": {\n \
\ \"exact_match,none\": 0.04065040650406504,\n \"exact_match_stderr,none\"\
: 0.017878907564437455,\n \"alias\": \" - leaderboard_math_counting_and_prob_hard\"\
\n },\n \"leaderboard_math_geometry_hard\": {\n \"exact_match,none\"\
: 0.045454545454545456,\n \"exact_match_stderr,none\": 0.018199158975632714,\n\
\ \"alias\": \" - leaderboard_math_geometry_hard\"\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"exact_match,none\"\
: 0.010714285714285714,\n \"exact_match_stderr,none\": 0.00616368419476161,\n\
\ \"alias\": \" - leaderboard_math_intermediate_algebra_hard\"\n \
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"exact_match,none\"\
: 0.03896103896103896,\n \"exact_match_stderr,none\": 0.015643720451650293,\n\
\ \"alias\": \" - leaderboard_math_num_theory_hard\"\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"exact_match,none\"\
: 0.16062176165803108,\n \"exact_match_stderr,none\": 0.026499057701397447,\n\
\ \"alias\": \" - leaderboard_math_prealgebra_hard\"\n },\n \
\ \"leaderboard_math_precalculus_hard\": {\n \"exact_match,none\"\
: 0.02962962962962963,\n \"exact_match_stderr,none\": 0.014648038602753793,\n\
\ \"alias\": \" - leaderboard_math_precalculus_hard\"\n },\n\
\ \"leaderboard_mmlu_pro\": {\n \"acc,none\": 0.366938164893617,\n\
\ \"acc_stderr,none\": 0.00439408693623272,\n \"alias\": \"\
\ - leaderboard_mmlu_pro\"\n },\n \"leaderboard_musr\": {\n \
\ \"acc_norm,none\": 0.376984126984127,\n \"acc_norm_stderr,none\"\
: 0.017271983959259288,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"acc_norm,none\"\
: 0.508,\n \"acc_norm_stderr,none\": 0.0316821564314138,\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\"\n },\n \"\
leaderboard_musr_object_placements\": {\n \"acc_norm,none\": 0.26171875,\n\
\ \"acc_norm_stderr,none\": 0.027526959754524398,\n \"alias\"\
: \" - leaderboard_musr_object_placements\"\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"acc_norm,none\": 0.364,\n \"acc_norm_stderr,none\"\
: 0.03049155522040555,\n \"alias\": \" - leaderboard_musr_team_allocation\"\
\n }\n },\n \"leaderboard\": {\n \"inst_level_strict_acc,none\"\
: 0.7470023980815348,\n \"inst_level_strict_acc_stderr,none\": \"N/A\",\n\
\ \"prompt_level_loose_acc,none\": 0.7652495378927912,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.01823928821343378,\n \"inst_level_loose_acc,none\": 0.8357314148681055,\n\
\ \"inst_level_loose_acc_stderr,none\": \"N/A\",\n \"acc,none\": 0.366938164893617,\n\
\ \"acc_stderr,none\": 0.00439408693623272,\n \"prompt_level_strict_acc,none\"\
: 0.6561922365988909,\n \"prompt_level_strict_acc_stderr,none\": 0.020439793487859976,\n\
\ \"exact_match,none\": 0.06570996978851963,\n \"exact_match_stderr,none\"\
: 0.0066807029923575765,\n \"acc_norm,none\": 0.44882604747697497,\n \
\ \"acc_norm_stderr,none\": 0.005342481492488792,\n \"alias\": \"leaderboard\"\
\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\": 0.4974830758548863,\n\
\ \"acc_norm_stderr,none\": 0.006249798665250531,\n \"alias\": \"\
\ - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\": {\n\
\ \"acc_norm,none\": 0.788,\n \"acc_norm_stderr,none\": 0.025901884690541156,\n\
\ \"alias\": \" - leaderboard_bbh_boolean_expressions\"\n },\n \"\
leaderboard_bbh_causal_judgement\": {\n \"acc_norm,none\": 0.5882352941176471,\n\
\ \"acc_norm_stderr,none\": 0.03608640563085621,\n \"alias\": \" \
\ - leaderboard_bbh_causal_judgement\"\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"acc_norm,none\": 0.484,\n \"acc_norm_stderr,none\": 0.031669985030107414,\n\
\ \"alias\": \" - leaderboard_bbh_date_understanding\"\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"acc_norm,none\": 0.624,\n \"acc_norm_stderr,none\": 0.03069633626739458,\n\
\ \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\n },\n \"leaderboard_bbh_formal_fallacies\"\
: {\n \"acc_norm,none\": 0.568,\n \"acc_norm_stderr,none\": 0.03139181076542941,\n\
\ \"alias\": \" - leaderboard_bbh_formal_fallacies\"\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"acc_norm,none\": 0.304,\n \"acc_norm_stderr,none\": 0.029150213374159673,\n\
\ \"alias\": \" - leaderboard_bbh_geometric_shapes\"\n },\n \"leaderboard_bbh_hyperbaton\"\
: {\n \"acc_norm,none\": 0.592,\n \"acc_norm_stderr,none\": 0.03114520984654849,\n\
\ \"alias\": \" - leaderboard_bbh_hyperbaton\"\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"acc_norm,none\": 0.392,\n \"acc_norm_stderr,none\": 0.030938207620401195,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\n \
\ },\n \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"acc_norm,none\"\
: 0.464,\n \"acc_norm_stderr,none\": 0.03160397514522373,\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_seven_objects\"\n },\n \"leaderboard_bbh_logical_deduction_three_objects\"\
: {\n \"acc_norm,none\": 0.552,\n \"acc_norm_stderr,none\": 0.031514387611153515,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\n \
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"acc_norm,none\"\
: 0.516,\n \"acc_norm_stderr,none\": 0.031669985030107414,\n \"alias\"\
: \" - leaderboard_bbh_movie_recommendation\"\n },\n \"leaderboard_bbh_navigate\"\
: {\n \"acc_norm,none\": 0.536,\n \"acc_norm_stderr,none\": 0.03160397514522375,\n\
\ \"alias\": \" - leaderboard_bbh_navigate\"\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"acc_norm,none\": 0.372,\n \"acc_norm_stderr,none\": 0.030630325944558313,\n\
\ \"alias\": \" - leaderboard_bbh_object_counting\"\n },\n \"leaderboard_bbh_penguins_in_a_table\"\
: {\n \"acc_norm,none\": 0.541095890410959,\n \"acc_norm_stderr,none\"\
: 0.041382249050673066,\n \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\
\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \"\
acc_norm,none\": 0.652,\n \"acc_norm_stderr,none\": 0.0301865684645117,\n\
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\n \
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"acc_norm,none\": 0.712,\n\
\ \"acc_norm_stderr,none\": 0.02869700458739821,\n \"alias\": \" \
\ - leaderboard_bbh_ruin_names\"\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"acc_norm,none\": 0.48,\n \"acc_norm_stderr,none\": 0.031660853408495185,\n\
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"acc_norm,none\": 0.4943820224719101,\n\
\ \"acc_norm_stderr,none\": 0.03757992900475981,\n \"alias\": \" \
\ - leaderboard_bbh_snarks\"\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"acc_norm,none\": 0.796,\n \"acc_norm_stderr,none\": 0.02553712157454814,\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\"\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"acc_norm,none\": 0.272,\n \
\ \"acc_norm_stderr,none\": 0.02820008829631,\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"acc_norm,none\": 0.184,\n \"acc_norm_stderr,none\": 0.02455581299422256,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"acc_norm,none\": 0.212,\n \"acc_norm_stderr,none\": 0.02590188469054116,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"acc_norm,none\": 0.376,\n \"acc_norm_stderr,none\": 0.030696336267394583,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
\n },\n \"leaderboard_bbh_web_of_lies\": {\n \"acc_norm,none\": 0.48,\n\
\ \"acc_norm_stderr,none\": 0.031660853408495185,\n \"alias\": \"\
\ - leaderboard_bbh_web_of_lies\"\n },\n \"leaderboard_gpqa\": {\n \
\ \"acc_norm,none\": 0.25922818791946306,\n \"acc_norm_stderr,none\": 0.01270453266853576,\n\
\ \"alias\": \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\"\
: {\n \"acc_norm,none\": 0.26262626262626265,\n \"acc_norm_stderr,none\"\
: 0.031353050095330855,\n \"alias\": \" - leaderboard_gpqa_diamond\"\n \
\ },\n \"leaderboard_gpqa_extended\": {\n \"acc_norm,none\": 0.2692307692307692,\n\
\ \"acc_norm_stderr,none\": 0.019000027142915377,\n \"alias\": \"\
\ - leaderboard_gpqa_extended\"\n },\n \"leaderboard_gpqa_main\": {\n \
\ \"acc_norm,none\": 0.24553571428571427,\n \"acc_norm_stderr,none\"\
: 0.020357428454484575,\n \"alias\": \" - leaderboard_gpqa_main\"\n },\n\
\ \"leaderboard_ifeval\": {\n \"prompt_level_strict_acc,none\": 0.6561922365988909,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.020439793487859976,\n \
\ \"inst_level_strict_acc,none\": 0.7470023980815348,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.7652495378927912,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.01823928821343378,\n \"inst_level_loose_acc,none\"\
: 0.8357314148681055,\n \"inst_level_loose_acc_stderr,none\": \"N/A\",\n\
\ \"alias\": \" - leaderboard_ifeval\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.06570996978851963,\n \"exact_match_stderr,none\"\
: 0.006680702992357577,\n \"alias\": \" - leaderboard_math_hard\"\n },\n\
\ \"leaderboard_math_algebra_hard\": {\n \"exact_match,none\": 0.10423452768729642,\n\
\ \"exact_match_stderr,none\": 0.017467962356999933,\n \"alias\":\
\ \" - leaderboard_math_algebra_hard\"\n },\n \"leaderboard_math_counting_and_prob_hard\"\
: {\n \"exact_match,none\": 0.04065040650406504,\n \"exact_match_stderr,none\"\
: 0.017878907564437455,\n \"alias\": \" - leaderboard_math_counting_and_prob_hard\"\
\n },\n \"leaderboard_math_geometry_hard\": {\n \"exact_match,none\"\
: 0.045454545454545456,\n \"exact_match_stderr,none\": 0.018199158975632714,\n\
\ \"alias\": \" - leaderboard_math_geometry_hard\"\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"exact_match,none\": 0.010714285714285714,\n \"exact_match_stderr,none\"\
: 0.00616368419476161,\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\"\
\n },\n \"leaderboard_math_num_theory_hard\": {\n \"exact_match,none\"\
: 0.03896103896103896,\n \"exact_match_stderr,none\": 0.015643720451650293,\n\
\ \"alias\": \" - leaderboard_math_num_theory_hard\"\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"exact_match,none\": 0.16062176165803108,\n \"exact_match_stderr,none\"\
: 0.026499057701397447,\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
\n },\n \"leaderboard_math_precalculus_hard\": {\n \"exact_match,none\"\
: 0.02962962962962963,\n \"exact_match_stderr,none\": 0.014648038602753793,\n\
\ \"alias\": \" - leaderboard_math_precalculus_hard\"\n },\n \"leaderboard_mmlu_pro\"\
: {\n \"acc,none\": 0.366938164893617,\n \"acc_stderr,none\": 0.00439408693623272,\n\
\ \"alias\": \" - leaderboard_mmlu_pro\"\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.376984126984127,\n \"acc_norm_stderr,none\"\
: 0.017271983959259288,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"acc_norm,none\": 0.508,\n\
\ \"acc_norm_stderr,none\": 0.0316821564314138,\n \"alias\": \" -\
\ leaderboard_musr_murder_mysteries\"\n },\n \"leaderboard_musr_object_placements\"\
: {\n \"acc_norm,none\": 0.26171875,\n \"acc_norm_stderr,none\": 0.027526959754524398,\n\
\ \"alias\": \" - leaderboard_musr_object_placements\"\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"acc_norm,none\": 0.364,\n \"acc_norm_stderr,none\": 0.03049155522040555,\n\
\ \"alias\": \" - leaderboard_musr_team_allocation\"\n }\n}\n```"
repo_url: https://huggingface.co/yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_date_understanding
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_navigate
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_navigate_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_object_counting
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_object_counting_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_ruin_names
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_snarks
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_snarks_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_gpqa_diamond
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_gpqa_diamond_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_gpqa_extended
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_gpqa_extended_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_gpqa_main
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_gpqa_main_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_ifeval
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_ifeval_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_math_algebra_hard
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_math_algebra_hard_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_math_geometry_hard
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_math_geometry_hard_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_math_num_theory_hard
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_math_precalculus_hard
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_mmlu_pro
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_mmlu_pro_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_musr_object_placements
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_musr_object_placements_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-09-29T15-31-50.382131.jsonl'
- config_name: yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_musr_team_allocation
data_files:
- split: 2024_09_29T15_31_50.382131
path:
- '**/samples_leaderboard_musr_team_allocation_2024-09-29T15-31-50.382131.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-09-29T15-31-50.382131.jsonl'
---
# Dataset Card for Evaluation run of yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002](https://huggingface.co/yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002)
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002-details",
name="yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
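Continuing from the `data` object loaded above, the returned split is a regular `datasets.Dataset`, so the usual inspection helpers apply; note that the column names vary per task and are not guaranteed here:
```python
# Columns differ per task; inspect before relying on any field.
print(data.column_names)
print(data[0])

# Convert to pandas for ad-hoc filtering or aggregation.
df = data.to_pandas()
print(df.head())
```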
## Latest results
These are the [latest results from run 2024-09-29T15-31-50.382131](https://huggingface.co/datasets/open-llm-leaderboard/yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002-details/blob/main/yfzp__Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002/results_2024-09-29T15-31-50.382131.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split for each eval):
```python
{
"all": {
"leaderboard": {
"inst_level_strict_acc,none": 0.7470023980815348,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.7652495378927912,
"prompt_level_loose_acc_stderr,none": 0.01823928821343378,
"inst_level_loose_acc,none": 0.8357314148681055,
"inst_level_loose_acc_stderr,none": "N/A",
"acc,none": 0.366938164893617,
"acc_stderr,none": 0.00439408693623272,
"prompt_level_strict_acc,none": 0.6561922365988909,
"prompt_level_strict_acc_stderr,none": 0.020439793487859976,
"exact_match,none": 0.06570996978851963,
"exact_match_stderr,none": 0.0066807029923575765,
"acc_norm,none": 0.44882604747697497,
"acc_norm_stderr,none": 0.005342481492488792,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.4974830758548863,
"acc_norm_stderr,none": 0.006249798665250531,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"acc_norm,none": 0.788,
"acc_norm_stderr,none": 0.025901884690541156,
"alias": " - leaderboard_bbh_boolean_expressions"
},
"leaderboard_bbh_causal_judgement": {
"acc_norm,none": 0.5882352941176471,
"acc_norm_stderr,none": 0.03608640563085621,
"alias": " - leaderboard_bbh_causal_judgement"
},
"leaderboard_bbh_date_understanding": {
"acc_norm,none": 0.484,
"acc_norm_stderr,none": 0.031669985030107414,
"alias": " - leaderboard_bbh_date_understanding"
},
"leaderboard_bbh_disambiguation_qa": {
"acc_norm,none": 0.624,
"acc_norm_stderr,none": 0.03069633626739458,
"alias": " - leaderboard_bbh_disambiguation_qa"
},
"leaderboard_bbh_formal_fallacies": {
"acc_norm,none": 0.568,
"acc_norm_stderr,none": 0.03139181076542941,
"alias": " - leaderboard_bbh_formal_fallacies"
},
"leaderboard_bbh_geometric_shapes": {
"acc_norm,none": 0.304,
"acc_norm_stderr,none": 0.029150213374159673,
"alias": " - leaderboard_bbh_geometric_shapes"
},
"leaderboard_bbh_hyperbaton": {
"acc_norm,none": 0.592,
"acc_norm_stderr,none": 0.03114520984654849,
"alias": " - leaderboard_bbh_hyperbaton"
},
"leaderboard_bbh_logical_deduction_five_objects": {
"acc_norm,none": 0.392,
"acc_norm_stderr,none": 0.030938207620401195,
"alias": " - leaderboard_bbh_logical_deduction_five_objects"
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"acc_norm,none": 0.464,
"acc_norm_stderr,none": 0.03160397514522373,
"alias": " - leaderboard_bbh_logical_deduction_seven_objects"
},
"leaderboard_bbh_logical_deduction_three_objects": {
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.031514387611153515,
"alias": " - leaderboard_bbh_logical_deduction_three_objects"
},
"leaderboard_bbh_movie_recommendation": {
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.031669985030107414,
"alias": " - leaderboard_bbh_movie_recommendation"
},
"leaderboard_bbh_navigate": {
"acc_norm,none": 0.536,
"acc_norm_stderr,none": 0.03160397514522375,
"alias": " - leaderboard_bbh_navigate"
},
"leaderboard_bbh_object_counting": {
"acc_norm,none": 0.372,
"acc_norm_stderr,none": 0.030630325944558313,
"alias": " - leaderboard_bbh_object_counting"
},
"leaderboard_bbh_penguins_in_a_table": {
"acc_norm,none": 0.541095890410959,
"acc_norm_stderr,none": 0.041382249050673066,
"alias": " - leaderboard_bbh_penguins_in_a_table"
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"acc_norm,none": 0.652,
"acc_norm_stderr,none": 0.0301865684645117,
"alias": " - leaderboard_bbh_reasoning_about_colored_objects"
},
"leaderboard_bbh_ruin_names": {
"acc_norm,none": 0.712,
"acc_norm_stderr,none": 0.02869700458739821,
"alias": " - leaderboard_bbh_ruin_names"
},
"leaderboard_bbh_salient_translation_error_detection": {
"acc_norm,none": 0.48,
"acc_norm_stderr,none": 0.031660853408495185,
"alias": " - leaderboard_bbh_salient_translation_error_detection"
},
"leaderboard_bbh_snarks": {
"acc_norm,none": 0.4943820224719101,
"acc_norm_stderr,none": 0.03757992900475981,
"alias": " - leaderboard_bbh_snarks"
},
"leaderboard_bbh_sports_understanding": {
"acc_norm,none": 0.796,
"acc_norm_stderr,none": 0.02553712157454814,
"alias": " - leaderboard_bbh_sports_understanding"
},
"leaderboard_bbh_temporal_sequences": {
"acc_norm,none": 0.272,
"acc_norm_stderr,none": 0.02820008829631,
"alias": " - leaderboard_bbh_temporal_sequences"
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"acc_norm,none": 0.184,
"acc_norm_stderr,none": 0.02455581299422256,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"acc_norm,none": 0.212,
"acc_norm_stderr,none": 0.02590188469054116,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"acc_norm,none": 0.376,
"acc_norm_stderr,none": 0.030696336267394583,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects"
},
"leaderboard_bbh_web_of_lies": {
"acc_norm,none": 0.48,
"acc_norm_stderr,none": 0.031660853408495185,
"alias": " - leaderboard_bbh_web_of_lies"
},
"leaderboard_gpqa": {
"acc_norm,none": 0.25922818791946306,
"acc_norm_stderr,none": 0.01270453266853576,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"acc_norm,none": 0.26262626262626265,
"acc_norm_stderr,none": 0.031353050095330855,
"alias": " - leaderboard_gpqa_diamond"
},
"leaderboard_gpqa_extended": {
"acc_norm,none": 0.2692307692307692,
"acc_norm_stderr,none": 0.019000027142915377,
"alias": " - leaderboard_gpqa_extended"
},
"leaderboard_gpqa_main": {
"acc_norm,none": 0.24553571428571427,
"acc_norm_stderr,none": 0.020357428454484575,
"alias": " - leaderboard_gpqa_main"
},
"leaderboard_ifeval": {
"prompt_level_strict_acc,none": 0.6561922365988909,
"prompt_level_strict_acc_stderr,none": 0.020439793487859976,
"inst_level_strict_acc,none": 0.7470023980815348,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.7652495378927912,
"prompt_level_loose_acc_stderr,none": 0.01823928821343378,
"inst_level_loose_acc,none": 0.8357314148681055,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": " - leaderboard_ifeval"
},
"leaderboard_math_hard": {
"exact_match,none": 0.06570996978851963,
"exact_match_stderr,none": 0.006680702992357577,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"exact_match,none": 0.10423452768729642,
"exact_match_stderr,none": 0.017467962356999933,
"alias": " - leaderboard_math_algebra_hard"
},
"leaderboard_math_counting_and_prob_hard": {
"exact_match,none": 0.04065040650406504,
"exact_match_stderr,none": 0.017878907564437455,
"alias": " - leaderboard_math_counting_and_prob_hard"
},
"leaderboard_math_geometry_hard": {
"exact_match,none": 0.045454545454545456,
"exact_match_stderr,none": 0.018199158975632714,
"alias": " - leaderboard_math_geometry_hard"
},
"leaderboard_math_intermediate_algebra_hard": {
"exact_match,none": 0.010714285714285714,
"exact_match_stderr,none": 0.00616368419476161,
"alias": " - leaderboard_math_intermediate_algebra_hard"
},
"leaderboard_math_num_theory_hard": {
"exact_match,none": 0.03896103896103896,
"exact_match_stderr,none": 0.015643720451650293,
"alias": " - leaderboard_math_num_theory_hard"
},
"leaderboard_math_prealgebra_hard": {
"exact_match,none": 0.16062176165803108,
"exact_match_stderr,none": 0.026499057701397447,
"alias": " - leaderboard_math_prealgebra_hard"
},
"leaderboard_math_precalculus_hard": {
"exact_match,none": 0.02962962962962963,
"exact_match_stderr,none": 0.014648038602753793,
"alias": " - leaderboard_math_precalculus_hard"
},
"leaderboard_mmlu_pro": {
"acc,none": 0.366938164893617,
"acc_stderr,none": 0.00439408693623272,
"alias": " - leaderboard_mmlu_pro"
},
"leaderboard_musr": {
"acc_norm,none": 0.376984126984127,
"acc_norm_stderr,none": 0.017271983959259288,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.0316821564314138,
"alias": " - leaderboard_musr_murder_mysteries"
},
"leaderboard_musr_object_placements": {
"acc_norm,none": 0.26171875,
"acc_norm_stderr,none": 0.027526959754524398,
"alias": " - leaderboard_musr_object_placements"
},
"leaderboard_musr_team_allocation": {
"acc_norm,none": 0.364,
"acc_norm_stderr,none": 0.03049155522040555,
"alias": " - leaderboard_musr_team_allocation"
}
},
"leaderboard": {
"inst_level_strict_acc,none": 0.7470023980815348,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.7652495378927912,
"prompt_level_loose_acc_stderr,none": 0.01823928821343378,
"inst_level_loose_acc,none": 0.8357314148681055,
"inst_level_loose_acc_stderr,none": "N/A",
"acc,none": 0.366938164893617,
"acc_stderr,none": 0.00439408693623272,
"prompt_level_strict_acc,none": 0.6561922365988909,
"prompt_level_strict_acc_stderr,none": 0.020439793487859976,
"exact_match,none": 0.06570996978851963,
"exact_match_stderr,none": 0.0066807029923575765,
"acc_norm,none": 0.44882604747697497,
"acc_norm_stderr,none": 0.005342481492488792,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.4974830758548863,
"acc_norm_stderr,none": 0.006249798665250531,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"acc_norm,none": 0.788,
"acc_norm_stderr,none": 0.025901884690541156,
"alias": " - leaderboard_bbh_boolean_expressions"
},
"leaderboard_bbh_causal_judgement": {
"acc_norm,none": 0.5882352941176471,
"acc_norm_stderr,none": 0.03608640563085621,
"alias": " - leaderboard_bbh_causal_judgement"
},
"leaderboard_bbh_date_understanding": {
"acc_norm,none": 0.484,
"acc_norm_stderr,none": 0.031669985030107414,
"alias": " - leaderboard_bbh_date_understanding"
},
"leaderboard_bbh_disambiguation_qa": {
"acc_norm,none": 0.624,
"acc_norm_stderr,none": 0.03069633626739458,
"alias": " - leaderboard_bbh_disambiguation_qa"
},
"leaderboard_bbh_formal_fallacies": {
"acc_norm,none": 0.568,
"acc_norm_stderr,none": 0.03139181076542941,
"alias": " - leaderboard_bbh_formal_fallacies"
},
"leaderboard_bbh_geometric_shapes": {
"acc_norm,none": 0.304,
"acc_norm_stderr,none": 0.029150213374159673,
"alias": " - leaderboard_bbh_geometric_shapes"
},
"leaderboard_bbh_hyperbaton": {
"acc_norm,none": 0.592,
"acc_norm_stderr,none": 0.03114520984654849,
"alias": " - leaderboard_bbh_hyperbaton"
},
"leaderboard_bbh_logical_deduction_five_objects": {
"acc_norm,none": 0.392,
"acc_norm_stderr,none": 0.030938207620401195,
"alias": " - leaderboard_bbh_logical_deduction_five_objects"
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"acc_norm,none": 0.464,
"acc_norm_stderr,none": 0.03160397514522373,
"alias": " - leaderboard_bbh_logical_deduction_seven_objects"
},
"leaderboard_bbh_logical_deduction_three_objects": {
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.031514387611153515,
"alias": " - leaderboard_bbh_logical_deduction_three_objects"
},
"leaderboard_bbh_movie_recommendation": {
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.031669985030107414,
"alias": " - leaderboard_bbh_movie_recommendation"
},
"leaderboard_bbh_navigate": {
"acc_norm,none": 0.536,
"acc_norm_stderr,none": 0.03160397514522375,
"alias": " - leaderboard_bbh_navigate"
},
"leaderboard_bbh_object_counting": {
"acc_norm,none": 0.372,
"acc_norm_stderr,none": 0.030630325944558313,
"alias": " - leaderboard_bbh_object_counting"
},
"leaderboard_bbh_penguins_in_a_table": {
"acc_norm,none": 0.541095890410959,
"acc_norm_stderr,none": 0.041382249050673066,
"alias": " - leaderboard_bbh_penguins_in_a_table"
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"acc_norm,none": 0.652,
"acc_norm_stderr,none": 0.0301865684645117,
"alias": " - leaderboard_bbh_reasoning_about_colored_objects"
},
"leaderboard_bbh_ruin_names": {
"acc_norm,none": 0.712,
"acc_norm_stderr,none": 0.02869700458739821,
"alias": " - leaderboard_bbh_ruin_names"
},
"leaderboard_bbh_salient_translation_error_detection": {
"acc_norm,none": 0.48,
"acc_norm_stderr,none": 0.031660853408495185,
"alias": " - leaderboard_bbh_salient_translation_error_detection"
},
"leaderboard_bbh_snarks": {
"acc_norm,none": 0.4943820224719101,
"acc_norm_stderr,none": 0.03757992900475981,
"alias": " - leaderboard_bbh_snarks"
},
"leaderboard_bbh_sports_understanding": {
"acc_norm,none": 0.796,
"acc_norm_stderr,none": 0.02553712157454814,
"alias": " - leaderboard_bbh_sports_understanding"
},
"leaderboard_bbh_temporal_sequences": {
"acc_norm,none": 0.272,
"acc_norm_stderr,none": 0.02820008829631,
"alias": " - leaderboard_bbh_temporal_sequences"
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"acc_norm,none": 0.184,
"acc_norm_stderr,none": 0.02455581299422256,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"acc_norm,none": 0.212,
"acc_norm_stderr,none": 0.02590188469054116,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"acc_norm,none": 0.376,
"acc_norm_stderr,none": 0.030696336267394583,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects"
},
"leaderboard_bbh_web_of_lies": {
"acc_norm,none": 0.48,
"acc_norm_stderr,none": 0.031660853408495185,
"alias": " - leaderboard_bbh_web_of_lies"
},
"leaderboard_gpqa": {
"acc_norm,none": 0.25922818791946306,
"acc_norm_stderr,none": 0.01270453266853576,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"acc_norm,none": 0.26262626262626265,
"acc_norm_stderr,none": 0.031353050095330855,
"alias": " - leaderboard_gpqa_diamond"
},
"leaderboard_gpqa_extended": {
"acc_norm,none": 0.2692307692307692,
"acc_norm_stderr,none": 0.019000027142915377,
"alias": " - leaderboard_gpqa_extended"
},
"leaderboard_gpqa_main": {
"acc_norm,none": 0.24553571428571427,
"acc_norm_stderr,none": 0.020357428454484575,
"alias": " - leaderboard_gpqa_main"
},
"leaderboard_ifeval": {
"prompt_level_strict_acc,none": 0.6561922365988909,
"prompt_level_strict_acc_stderr,none": 0.020439793487859976,
"inst_level_strict_acc,none": 0.7470023980815348,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.7652495378927912,
"prompt_level_loose_acc_stderr,none": 0.01823928821343378,
"inst_level_loose_acc,none": 0.8357314148681055,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": " - leaderboard_ifeval"
},
"leaderboard_math_hard": {
"exact_match,none": 0.06570996978851963,
"exact_match_stderr,none": 0.006680702992357577,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"exact_match,none": 0.10423452768729642,
"exact_match_stderr,none": 0.017467962356999933,
"alias": " - leaderboard_math_algebra_hard"
},
"leaderboard_math_counting_and_prob_hard": {
"exact_match,none": 0.04065040650406504,
"exact_match_stderr,none": 0.017878907564437455,
"alias": " - leaderboard_math_counting_and_prob_hard"
},
"leaderboard_math_geometry_hard": {
"exact_match,none": 0.045454545454545456,
"exact_match_stderr,none": 0.018199158975632714,
"alias": " - leaderboard_math_geometry_hard"
},
"leaderboard_math_intermediate_algebra_hard": {
"exact_match,none": 0.010714285714285714,
"exact_match_stderr,none": 0.00616368419476161,
"alias": " - leaderboard_math_intermediate_algebra_hard"
},
"leaderboard_math_num_theory_hard": {
"exact_match,none": 0.03896103896103896,
"exact_match_stderr,none": 0.015643720451650293,
"alias": " - leaderboard_math_num_theory_hard"
},
"leaderboard_math_prealgebra_hard": {
"exact_match,none": 0.16062176165803108,
"exact_match_stderr,none": 0.026499057701397447,
"alias": " - leaderboard_math_prealgebra_hard"
},
"leaderboard_math_precalculus_hard": {
"exact_match,none": 0.02962962962962963,
"exact_match_stderr,none": 0.014648038602753793,
"alias": " - leaderboard_math_precalculus_hard"
},
"leaderboard_mmlu_pro": {
"acc,none": 0.366938164893617,
"acc_stderr,none": 0.00439408693623272,
"alias": " - leaderboard_mmlu_pro"
},
"leaderboard_musr": {
"acc_norm,none": 0.376984126984127,
"acc_norm_stderr,none": 0.017271983959259288,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.0316821564314138,
"alias": " - leaderboard_musr_murder_mysteries"
},
"leaderboard_musr_object_placements": {
"acc_norm,none": 0.26171875,
"acc_norm_stderr,none": 0.027526959754524398,
"alias": " - leaderboard_musr_object_placements"
},
"leaderboard_musr_team_allocation": {
"acc_norm,none": 0.364,
"acc_norm_stderr,none": 0.03049155522040555,
"alias": " - leaderboard_musr_team_allocation"
}
}
```
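The sections below are still unfilled, so as a stopgap here is a minimal sketch of how the results blob above could be inspected. It is only an illustration: the file name `results.json` and the flat key layout are assumptions inferred from the JSON printed above, not a documented interface.
```python
import json

# File name is an assumption; substitute the actual results_*.json
# artifact stored alongside this card.
with open("results.json") as f:
    results = json.load(f)

# Gather the normalized-accuracy score of every BBH subtask
# (keys follow the "leaderboard_bbh_*" pattern seen above).
bbh_scores = {
    name: metrics["acc_norm,none"]
    for name, metrics in results.items()
    if name.startswith("leaderboard_bbh_") and "acc_norm,none" in metrics
}

# Print subtasks from strongest to weakest.
for name, score in sorted(bbh_scores.items(), key=lambda kv: -kv[1]):
    print(f"{name}: {score:.3f}")
```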
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases, and limitations of the dataset. More information is needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
0xBreath/holistic_bert_dataset | 0xBreath | "2024-09-29T20:12:57Z" | 0 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T15:37:51Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: input_ids
sequence: int32
- name: token_type_ids
sequence: int8
- name: attention_mask
sequence: int8
splits:
- name: train
num_bytes: 973491
num_examples: 3
download_size: 563400
dataset_size: 973491
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
BedroQuendo/liderbritanico | BedroQuendo | "2024-09-29T15:41:07Z" | 0 | 0 | [
"license:openrail",
"region:us"
] | null | "2024-09-29T15:40:28Z" | ---
license: openrail
---
|
BedroQuendo/liderrusso | BedroQuendo | "2024-09-29T15:42:14Z" | 0 | 0 | [
"license:openrail",
"region:us"
] | null | "2024-09-29T15:41:37Z" | ---
license: openrail
---
|
Sujithanumala/Llama_3.2_1B_IT_dataset | Sujithanumala | "2024-09-29T15:44:19Z" | 0 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T15:44:18Z" | ---
dataset_info:
features:
- name: output
dtype: string
- name: input
dtype: string
- name: instruction
dtype: string
- name: Llama_IT_output
dtype: string
splits:
- name: train
num_bytes: 1917654
num_examples: 1000
download_size: 1057037
dataset_size: 1917654
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
skbose/indian-english-nptel-v0-tags | skbose | "2024-09-29T15:45:37Z" | 0 | 0 | [
"size_categories:100K<n<1M",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T15:45:07Z" | ---
dataset_info:
features:
- name: file_name
dtype: string
- name: text
dtype: string
- name: speaker_name
dtype: string
- name: transcription_normalised
dtype: string
- name: utterance_pitch_mean
dtype: float32
- name: utterance_pitch_std
dtype: float32
- name: snr
dtype: float64
- name: c50
dtype: float64
- name: speaking_rate
dtype: float64
- name: phonemes
dtype: string
- name: stoi
dtype: float64
- name: si-sdr
dtype: float64
- name: pesq
dtype: float64
splits:
- name: train
num_bytes: 193663380
num_examples: 435332
- name: test
num_bytes: 48425817
num_examples: 108835
download_size: 164659736
dataset_size: 242089197
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
|
nguyendv02/new_test | nguyendv02 | "2024-09-29T18:19:22Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T15:48:16Z" | ---
dataset_info:
features:
- name: region
dtype: string
- name: province_code
dtype: int64
- name: province_name
dtype: string
- name: filename
dtype: string
- name: text
dtype: string
- name: speakerID
dtype: string
- name: gender
dtype: int64
- name: audio
dtype: audio
splits:
- name: train
num_bytes: 51014515813.0
num_examples: 15023
- name: valid
num_bytes: 6334942186.0
num_examples: 1900
- name: test
num_bytes: 6669046477.0
num_examples: 2026
download_size: 59824312000
dataset_size: 64018504476.0
---
# Dataset Card for "new_test"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
skbose/indian-english-nptel-v0-tags-text | skbose | "2024-09-29T15:50:45Z" | 0 | 0 | [
"size_categories:100K<n<1M",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T15:50:32Z" | ---
dataset_info:
features:
- name: file_name
dtype: string
- name: text
dtype: string
- name: speaker_name
dtype: string
- name: transcription_normalised
dtype: string
- name: utterance_pitch_mean
dtype: float32
- name: utterance_pitch_std
dtype: float32
- name: snr
dtype: float64
- name: c50
dtype: float64
- name: speaking_rate
dtype: string
- name: phonemes
dtype: string
- name: stoi
dtype: float64
- name: si-sdr
dtype: float64
- name: pesq
dtype: float64
- name: noise
dtype: string
- name: reverberation
dtype: string
- name: speech_monotony
dtype: string
- name: sdr_noise
dtype: string
- name: pesq_speech_quality
dtype: string
splits:
- name: train
num_bytes: 244534315
num_examples: 435332
- name: test
num_bytes: 61143332
num_examples: 108835
download_size: 162165216
dataset_size: 305677647
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
|
Vitrola40/steviewonder | Vitrola40 | "2024-09-29T15:52:17Z" | 0 | 0 | [
"license:openrail",
"size_categories:n<1K",
"format:audiofolder",
"modality:audio",
"library:datasets",
"library:mlcroissant",
"region:us"
] | null | "2024-09-29T15:51:37Z" | ---
license: openrail
---
|
jlbaker361/ahri16_4_ddpo | jlbaker361 | "2024-09-29T21:45:59Z" | 0 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T15:59:45Z" | ---
dataset_info:
features:
- name: index
dtype: int64
- name: image
dtype: image
splits:
- name: train
num_bytes: 15393395.0
num_examples: 15
download_size: 15395581
dataset_size: 15393395.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Hrishi15/pubchem_molecule_dataset_v2 | Hrishi15 | "2024-09-29T16:07:39Z" | 0 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T16:07:37Z" | ---
dataset_info:
features:
- name: CID
dtype: int64
- name: Name
dtype: string
- name: Description
dtype: string
- name: MolecularWeight
dtype: string
- name: LogP
dtype: float64
- name: HBondDonorCount
dtype: float64
- name: HBondAcceptorCount
dtype: float64
- name: TPSA
dtype: float64
- name: Complexity
dtype: float64
- name: MeltingPoint
dtype: string
- name: BoilingPoint
dtype: string
- name: Density
dtype: string
- name: Toxicity
dtype: string
- name: DrugInfo
dtype: string
- name: Pharmacodynamics
dtype: string
- name: Solubility
dtype: float64
- name: pKa
dtype: string
splits:
- name: train
num_bytes: 4528063
num_examples: 1000
download_size: 1941192
dataset_size: 4528063
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Self-GRIT/arc_c_eval | Self-GRIT | "2024-09-29T16:16:16Z" | 0 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T16:16:15Z" | ---
dataset_info:
features:
- name: id
dtype: string
- name: question
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: answers
dtype: string
splits:
- name: valid
num_bytes: 375511
num_examples: 1172
download_size: 203814
dataset_size: 375511
configs:
- config_name: default
data_files:
- split: valid
path: data/valid-*
---
|
Self-GRIT/arc_c_random_subsample_eval | Self-GRIT | "2024-09-29T16:16:18Z" | 0 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T16:16:17Z" | ---
dataset_info:
features:
- name: id
dtype: string
- name: question
dtype: string
- name: choices
sequence:
- name: text
dtype: string
- name: label
dtype: string
- name: answers
dtype: string
splits:
- name: valid
num_bytes: 128160.75085324232
num_examples: 400
download_size: 71281
dataset_size: 128160.75085324232
configs:
- config_name: default
data_files:
- split: valid
path: data/valid-*
---
|
rinabuoy/text-summarization-seg | rinabuoy | "2024-09-29T16:19:49Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T16:19:24Z" | ---
dataset_info:
features:
- name: text_seg
dtype: string
- name: summary_seg
dtype: string
splits:
- name: train
num_bytes: 577861894
num_examples: 84352
download_size: 190936337
dataset_size: 577861894
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
SURESHBEEKHANI/finance_sentiment | SURESHBEEKHANI | "2024-09-29T16:43:13Z" | 0 | 0 | [
"task_categories:text-classification",
"license:mit",
"size_categories:1K<n<10K",
"format:csv",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | [
"text-classification"
] | "2024-09-29T16:19:28Z" | ---
license: mit
task_categories:
- text-classification
size_categories:
- n<1K
--- |
klcsp/summarize-response2 | klcsp | "2024-09-29T16:21:55Z" | 0 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T16:21:50Z" | ---
dataset_info:
features:
- name: instructions
dtype: string
- name: target_responses
dtype: string
- name: candidate_responses
dtype: string
- name: model_id
dtype: string
- name: model_sha
dtype: string
splits:
- name: llama3_1_8b_k
num_bytes: 340233
num_examples: 100
download_size: 91931
dataset_size: 340233
configs:
- config_name: default
data_files:
- split: llama3_1_8b_k
path: data/llama3_1_8b_k-*
---
|
adityarra07/FAA_data | adityarra07 | "2024-09-29T16:40:40Z" | 0 | 0 | [
"size_categories:100K<n<1M",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T16:27:53Z" | ---
dataset_info:
features:
- name: id
dtype: string
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: transcription
dtype: string
splits:
- name: train
num_bytes: 17538341989.70426
num_examples: 343353
- name: test
num_bytes: 4450337596.076739
num_examples: 85839
download_size: 21300390861
dataset_size: 21988679585.781002
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
|
ServiceNow/BigDoc-FT-v0.11 | ServiceNow | "2024-09-29T21:34:32Z" | 0 | 0 | [
"size_categories:100K<n<1M",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T16:35:52Z" | ---
dataset_info:
features:
- name: sample_id
dtype: string
- name: dataset_name
dtype: string
- name: annotations
sequence: string
- name: queries
sequence: string
- name: task_name
dtype: string
- name: image
dtype: binary
splits:
- name: train
num_bytes: 132656393988
num_examples: 547926
- name: test
num_bytes: 974700393
num_examples: 5418
- name: hidden_test
num_bytes: 968084278
num_examples: 5418
- name: val
num_bytes: 2023868030
num_examples: 11300
download_size: 133487359871
dataset_size: 136623046689
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
- split: hidden_test
path: data/hidden_test-*
- split: val
path: data/val-*
---
|
lemousehunter/mdp-aug-yolo_datasetv2 | lemousehunter | "2024-09-29T16:43:46Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T16:38:03Z" | ---
dataset_info:
features:
- name: image
sequence:
sequence:
sequence: uint8
- name: yolo_coords
dtype: string
splits:
- name: train
num_bytes: 13677335332.176048
num_examples: 8470
- name: test
num_bytes: 3420141231.8239517
num_examples: 2118
download_size: 3231544337
dataset_size: 17097476564.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
|
sajilperfects/llama_medical_dataset_25k | sajilperfects | "2024-09-29T16:56:32Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T16:56:28Z" | ---
dataset_info:
features:
- name: data
list:
- name: content
dtype: string
- name: role
dtype: string
splits:
- name: train
num_bytes: 11749734
num_examples: 25000
download_size: 2073964
dataset_size: 11749734
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
CodeHima/app350_llama_format | CodeHima | "2024-09-29T16:57:37Z" | 0 | 0 | [
"task_categories:text-generation",
"task_categories:text-classification",
"language:en",
"license:mit",
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us",
"llm",
"conversations",
"llama",
"finetuning",
"privacy-policies",
"dataset"
] | [
"text-generation",
"text-classification"
] | "2024-09-29T16:56:29Z" | ---
language: en
license: mit
tags:
- llm
- conversations
- llama
- finetuning
- privacy-policies
- dataset
datasets:
- CodeHima/APP_350_LLM_Formatted
metrics:
- accuracy
- f1
task_categories:
- text-generation
- text-classification
---
# APP-350 Formatted Dataset for LLM Fine-tuning
## Dataset Summary
The **APP-350** dataset consists of structured conversation pairs formatted for fine-tuning Large Language Models (LLMs) such as LLaMA. Each pair contains a user's question or request and the AI assistant's response. The dataset is designed for privacy policy analysis and fairness evaluation, allowing models to learn from annotated interactions about privacy practices.
The conversations are organized into the following structure:
- **User Prompt:** The user initiates the conversation with a question or request.
- **Assistant Response:** The AI assistant provides a detailed response, including an assessment of the privacy policy clause.
## Intended Use
This dataset is ideal for training and fine-tuning conversational models, particularly those aimed at:
- Privacy policy analysis
- Legal document interpretation
- Fairness evaluation in legal and compliance documents
The dataset can also be used to develop models that specialize in understanding privacy-related practices and enhancing LLM performance in this domain.
## Dataset Structure
Each entry in the dataset is structured as a conversation between a **user** and an **assistant**:
```json
[
{
"content": "Analyze the following clause from a privacy policy and determine if it's fair or unfair...",
"role": "user"
},
{
"content": "This clause is fair. The privacy practices mentioned are: nan.",
"role": "assistant"
}
]
```
Each record contains:
- **content**: The text of the prompt or response.
- **role**: Specifies whether the content is from the 'user' or the 'assistant'.
## Example Entry
```json
[
  {
    "content": "How do astronomers determine the original wavelength of light emitted by a celestial body at rest...",
    "role": "user"
  },
  {
    "content": "Astronomers make use of the unique spectral fingerprints of elements found in stars...",
    "role": "assistant"
  }
]
```
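For a quick start, the snippet below shows one way to load and inspect the dataset with the Hugging Face `datasets` library. It is a minimal sketch: the repository id is taken from the citation in this card, while the `train` split name and the exact column layout are assumptions based on the structure described above.
```python
from datasets import load_dataset

# Repository id as cited in this card; the "train" split is an assumption.
ds = load_dataset("CodeHima/APP_350_LLM_Formatted", split="train")

# Each record should hold a list of {"content": ..., "role": ...} turns,
# matching the user/assistant structure shown above.
first = ds[0]
print(first)
```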
## Collection Process
This dataset was collected from various privacy policy clauses and conversations annotated with fairness labels. The dataset has been structured to reflect user-assistant interactions, making it suitable for training conversational AI systems.
## Licensing
The dataset is made available under the **MIT License**, which allows for flexible use, modification, and distribution of the dataset.
## Citation
If you use this dataset, please cite it as follows:
```
@dataset{app350_llm_formatted,
title = {APP-350 Formatted Dataset for LLM Fine-tuning},
author = {Himanshu Mohanty},
year = {2024},
url = {https://huggingface.co/datasets/CodeHima/APP_350_LLM_Formatted},
license = {MIT}
}
``` |
Holopyolo/RAG-RU-EVAL-BENCHMARK | Holopyolo | "2024-09-29T16:59:25Z" | 0 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T16:59:24Z" | ---
dataset_info:
features:
- name: context
dtype: string
- name: answer
sequence: string
- name: question
dtype: string
splits:
- name: train
num_bytes: 20149060
num_examples: 217
download_size: 416880
dataset_size: 20149060
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "RAG-RU-EVAL-BENCHMARK"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
okinosasuke/phuc | okinosasuke | "2024-09-29T17:03:13Z" | 0 | 0 | [
"license:mit",
"region:us"
] | null | "2024-09-29T17:03:13Z" | ---
license: mit
---
|
klcsp/coding-response2 | klcsp | "2024-09-29T17:04:07Z" | 0 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T17:04:03Z" | ---
dataset_info:
features:
- name: instructions
dtype: string
- name: target_responses
dtype: string
- name: candidate_responses
dtype: string
- name: model_id
dtype: string
- name: model_sha
dtype: string
splits:
- name: llama3_1_8b_k
num_bytes: 133831
num_examples: 64
download_size: 45375
dataset_size: 133831
configs:
- config_name: default
data_files:
- split: llama3_1_8b_k
path: data/llama3_1_8b_k-*
---
|
klcsp/summarize-eval | klcsp | "2024-09-30T00:46:44Z" | 0 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T17:07:44Z" | ---
dataset_info:
features:
- name: instructions
dtype: string
- name: target_responses
dtype: string
- name: candidate_responses
dtype: string
- name: model_id
dtype: string
- name: model_sha
dtype: string
- name: eval_prompts
dtype: string
- name: similarity_scores
dtype: float64
- name: precision_scores
dtype: float64
- name: evaluators
dtype: string
- name: dates
dtype: string
splits:
- name: llama3_1_8b_lora
num_bytes: 587902
num_examples: 100
- name: mistral_7b_0_3_lora
num_bytes: 618050
num_examples: 100
- name: gemma7b_k
num_bytes: 619876
num_examples: 100
download_size: 569694
dataset_size: 1825828
configs:
- config_name: default
data_files:
- split: llama3_1_8b_lora
path: data/llama3_1_8b_lora-*
- split: mistral_7b_0_3_lora
path: data/mistral_7b_0_3_lora-*
- split: gemma7b_k
path: data/gemma7b_k-*
---
|
upvantage/claude-context-35k-comma | upvantage | "2024-09-29T17:12:40Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T17:12:31Z" | ---
dataset_info:
features:
- name: instruction
dtype: string
- name: input
dtype: string
- name: output
dtype: string
splits:
- name: train
num_bytes: 27307069
num_examples: 17518
download_size: 14031007
dataset_size: 27307069
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
upvantage/claude-context-15k-comma-clean | upvantage | "2024-09-29T17:22:18Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T17:22:08Z" | ---
dataset_info:
features:
- name: instruction
dtype: string
- name: input
dtype: string
- name: output
dtype: string
splits:
- name: train
num_bytes: 24523032
num_examples: 15739
download_size: 12586810
dataset_size: 24523032
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
SongTonyLi/llama-1b-preference-merge-mix | SongTonyLi | "2024-09-29T17:26:32Z" | 0 | 0 | [
"size_categories:100K<n<1M",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T17:25:46Z" | ---
dataset_info:
features:
- name: chosen
list:
- name: content
dtype: string
- name: role
dtype: string
- name: rejected
list:
- name: content
dtype: string
- name: role
dtype: string
- name: prompt
dtype: string
- name: messages
list:
- name: content
dtype: string
- name: role
dtype: string
splits:
- name: train_sft
num_bytes: 1234446019
num_examples: 142245
download_size: 634329507
dataset_size: 1234446019
configs:
- config_name: default
data_files:
- split: train_sft
path: data/train_sft-*
---
|
Farjfar/Reddit-SGS_MergedDataset | Farjfar | "2024-09-29T17:28:07Z" | 0 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T17:28:06Z" | ---
dataset_info:
features:
- name: index
dtype: int64
- name: id
dtype: string
- name: subreddit
dtype: string
- name: year
dtype: int64
- name: body_cleaned
dtype: string
- name: annotation1
dtype: string
- name: annotation2
dtype: string
- name: annotation3
dtype: string
- name: vote_segments
dtype: string
- name: vote_counts
dtype: string
- name: segment
dtype: string
- name: count
dtype: int64
- name: disagreements
dtype: string
- name: reason_disagreement
dtype: string
- name: type_socialgroup
dtype: string
- name: segment_belongs_to
dtype: string
- name: highlight
dtype: bool
splits:
- name: train
num_bytes: 14186700
num_examples: 3685
download_size: 1437767
dataset_size: 14186700
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
sajilperfects/llama_medical_dataset_25k_qa | sajilperfects | "2024-09-29T17:32:05Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T17:32:03Z" | ---
dataset_info:
features:
- name: data
list:
- name: content
dtype: string
- name: role
dtype: string
splits:
- name: train
num_bytes: 29036421
num_examples: 25000
download_size: 15954672
dataset_size: 29036421
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
FurnTheFurnace/jack-black-dataset | FurnTheFurnace | "2024-09-29T17:40:21Z" | 0 | 0 | [
"license:cc-by-4.0",
"size_categories:n<1K",
"format:audiofolder",
"modality:audio",
"library:datasets",
"library:mlcroissant",
"region:us"
] | null | "2024-09-29T17:38:22Z" | ---
license: cc-by-4.0
---
|
Alarak/librispeech_debug | Alarak | "2024-09-29T17:47:34Z" | 0 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:audio",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T17:46:28Z" | ---
dataset_info:
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: prompt
dtype: string
- name: gt
dtype: string
- name: source
dtype: string
- name: task
dtype: string
splits:
- name: librispeech_test_clean
num_bytes: 1213503.0
num_examples: 10
download_size: 1153611
dataset_size: 1213503.0
configs:
- config_name: default
data_files:
- split: librispeech_test_clean
path: data/librispeech_test_clean-*
---
|
klcsp/coding-response3 | klcsp | "2024-09-29T17:47:07Z" | 0 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T17:47:02Z" | ---
dataset_info:
features:
- name: instructions
dtype: string
- name: target_responses
dtype: string
- name: candidate_responses
dtype: string
- name: model_id
dtype: string
- name: model_sha
dtype: string
splits:
- name: llama3_1_8b_k
num_bytes: 205480
num_examples: 64
download_size: 50396
dataset_size: 205480
configs:
- config_name: default
data_files:
- split: llama3_1_8b_k
path: data/llama3_1_8b_k-*
---
|
rryisthebest/Coreranker_mined_filtered | rryisthebest | "2024-09-29T18:20:36Z" | 0 | 0 | [
"license:apache-2.0",
"size_categories:100K<n<1M",
"format:json",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T18:01:07Z" | ---
license: apache-2.0
---
|
DaniilOr/addition | DaniilOr | "2024-09-29T20:07:11Z" | 0 | 0 | [
"task_categories:text2text-generation",
"license:mit",
"size_categories:1M<n<10M",
"format:csv",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | [
"text2text-generation"
] | "2024-09-29T18:13:54Z" | ---
license: mit
task_categories:
- text2text-generation
size_categories:
- 1K<n<10K
--- |
fotiecodes/jarvis-llama3.2-dataset | fotiecodes | "2024-09-29T19:12:22Z" | 0 | 0 | [
"license:mit",
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T18:14:46Z" | ---
license: mit
dataset_info:
features:
- name: conversations
list:
- name: from
dtype: string
- name: value
dtype: string
splits:
- name: train
num_bytes: 38623
num_examples: 230
download_size: 16557
dataset_size: 38623
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
WhoLetMeCook/CCIRSST2 | WhoLetMeCook | "2024-09-29T18:17:20Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T18:17:19Z" | ---
dataset_info:
features:
- name: instruction
dtype: string
- name: input
dtype: string
- name: output
dtype: string
splits:
- name: train
num_bytes: 8251100
num_examples: 67349
download_size: 2772873
dataset_size: 8251100
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
OALL/details_Syed-Hasan-8503__Phi-3-mini-4K-instruct-cpo-simpo | OALL | "2024-09-29T18:17:40Z" | 0 | 0 | [
"region:us"
] | null | "2024-09-29T18:17:29Z" | ---
pretty_name: Evaluation run of Syed-Hasan-8503/Phi-3-mini-4K-instruct-cpo-simpo
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [Syed-Hasan-8503/Phi-3-mini-4K-instruct-cpo-simpo](https://huggingface.co/Syed-Hasan-8503/Phi-3-mini-4K-instruct-cpo-simpo).\n\
\nThe dataset is composed of 136 configurations, each one corresponding to one of\
\ the evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run. The \"train\" split always points to the latest\
\ results.\n\nAn additional configuration \"results\" stores all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\"OALL/details_Syed-Hasan-8503__Phi-3-mini-4K-instruct-cpo-simpo\"\
,\n\t\"lighteval_xstory_cloze_ar_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\
\nThese are the [latest results from run 2024-09-29T18:11:33.864987](https://huggingface.co/datasets/OALL/details_Syed-Hasan-8503__Phi-3-mini-4K-instruct-cpo-simpo/blob/main/results_2024-09-29T18-11-33.864987.json) (note\
\ that there might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You can find each of them in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc_norm\": 0.375977281871918,\n\
\ \"acc_norm_stderr\": 0.03745292815851962,\n \"acc\": 0.5221707478491066,\n\
\ \"acc_stderr\": 0.01285446962593609\n },\n \"community|acva:Algeria|0\"\
: {\n \"acc_norm\": 0.4256410256410256,\n \"acc_norm_stderr\": 0.03549871080367707\n\
\ },\n \"community|acva:Ancient_Egypt|0\": {\n \"acc_norm\": 0.2761904761904762,\n\
\ \"acc_norm_stderr\": 0.02523199976751755\n },\n \"community|acva:Arab_Empire|0\"\
: {\n \"acc_norm\": 0.6188679245283019,\n \"acc_norm_stderr\": 0.029890609686286623\n\
\ },\n \"community|acva:Arabic_Architecture|0\": {\n \"acc_norm\":\
\ 0.4,\n \"acc_norm_stderr\": 0.0351726229056329\n },\n \"community|acva:Arabic_Art|0\"\
: {\n \"acc_norm\": 0.3384615384615385,\n \"acc_norm_stderr\": 0.03397280032734095\n\
\ },\n \"community|acva:Arabic_Astronomy|0\": {\n \"acc_norm\": 0.4666666666666667,\n\
\ \"acc_norm_stderr\": 0.03581804596782233\n },\n \"community|acva:Arabic_Calligraphy|0\"\
: {\n \"acc_norm\": 0.6509803921568628,\n \"acc_norm_stderr\": 0.029908319306125593\n\
\ },\n \"community|acva:Arabic_Ceremony|0\": {\n \"acc_norm\": 0.5567567567567567,\n\
\ \"acc_norm_stderr\": 0.03662223951330472\n },\n \"community|acva:Arabic_Clothing|0\"\
: {\n \"acc_norm\": 0.5128205128205128,\n \"acc_norm_stderr\": 0.03588610523192215\n\
\ },\n \"community|acva:Arabic_Culture|0\": {\n \"acc_norm\": 0.3384615384615385,\n\
\ \"acc_norm_stderr\": 0.033972800327340964\n },\n \"community|acva:Arabic_Food|0\"\
: {\n \"acc_norm\": 0.5487179487179488,\n \"acc_norm_stderr\": 0.035727098603183925\n\
\ },\n \"community|acva:Arabic_Funeral|0\": {\n \"acc_norm\": 0.3368421052631579,\n\
\ \"acc_norm_stderr\": 0.04874810431502903\n },\n \"community|acva:Arabic_Geography|0\"\
: {\n \"acc_norm\": 0.593103448275862,\n \"acc_norm_stderr\": 0.04093793981266236\n\
\ },\n \"community|acva:Arabic_History|0\": {\n \"acc_norm\": 0.4153846153846154,\n\
\ \"acc_norm_stderr\": 0.03538013280575029\n },\n \"community|acva:Arabic_Language_Origin|0\"\
: {\n \"acc_norm\": 0.5578947368421052,\n \"acc_norm_stderr\": 0.051224183891818126\n\
\ },\n \"community|acva:Arabic_Literature|0\": {\n \"acc_norm\": 0.503448275862069,\n\
\ \"acc_norm_stderr\": 0.04166567577101579\n },\n \"community|acva:Arabic_Math|0\"\
: {\n \"acc_norm\": 0.2923076923076923,\n \"acc_norm_stderr\": 0.0326543839374951\n\
\ },\n \"community|acva:Arabic_Medicine|0\": {\n \"acc_norm\": 0.5103448275862069,\n\
\ \"acc_norm_stderr\": 0.04165774775728763\n },\n \"community|acva:Arabic_Music|0\"\
: {\n \"acc_norm\": 0.2446043165467626,\n \"acc_norm_stderr\": 0.03659146222520568\n\
\ },\n \"community|acva:Arabic_Ornament|0\": {\n \"acc_norm\": 0.6153846153846154,\n\
\ \"acc_norm_stderr\": 0.03492896993742303\n },\n \"community|acva:Arabic_Philosophy|0\"\
: {\n \"acc_norm\": 0.593103448275862,\n \"acc_norm_stderr\": 0.04093793981266237\n\
\ },\n \"community|acva:Arabic_Physics_and_Chemistry|0\": {\n \"acc_norm\"\
: 0.6,\n \"acc_norm_stderr\": 0.03517262290563288\n },\n \"community|acva:Arabic_Wedding|0\"\
: {\n \"acc_norm\": 0.40512820512820513,\n \"acc_norm_stderr\": 0.03524577495610962\n\
\ },\n \"community|acva:Bahrain|0\": {\n \"acc_norm\": 0.37777777777777777,\n\
\ \"acc_norm_stderr\": 0.0730911212732345\n },\n \"community|acva:Comoros|0\"\
: {\n \"acc_norm\": 0.4888888888888889,\n \"acc_norm_stderr\": 0.07535922203472523\n\
\ },\n \"community|acva:Egypt_modern|0\": {\n \"acc_norm\": 0.42105263157894735,\n\
\ \"acc_norm_stderr\": 0.050924152299673286\n },\n \"community|acva:InfluenceFromAncientEgypt|0\"\
: {\n \"acc_norm\": 0.558974358974359,\n \"acc_norm_stderr\": 0.0356473293185358\n\
\ },\n \"community|acva:InfluenceFromByzantium|0\": {\n \"acc_norm\"\
: 0.7103448275862069,\n \"acc_norm_stderr\": 0.03780019230438015\n },\n\
\ \"community|acva:InfluenceFromChina|0\": {\n \"acc_norm\": 0.2717948717948718,\n\
\ \"acc_norm_stderr\": 0.031940861870257235\n },\n \"community|acva:InfluenceFromGreece|0\"\
: {\n \"acc_norm\": 0.6256410256410256,\n \"acc_norm_stderr\": 0.03474608430626236\n\
\ },\n \"community|acva:InfluenceFromIslam|0\": {\n \"acc_norm\": 0.41379310344827586,\n\
\ \"acc_norm_stderr\": 0.04104269211806232\n },\n \"community|acva:InfluenceFromPersia|0\"\
: {\n \"acc_norm\": 0.7085714285714285,\n \"acc_norm_stderr\": 0.03444952656229018\n\
\ },\n \"community|acva:InfluenceFromRome|0\": {\n \"acc_norm\": 0.5794871794871795,\n\
\ \"acc_norm_stderr\": 0.035441383893034833\n },\n \"community|acva:Iraq|0\"\
: {\n \"acc_norm\": 0.49411764705882355,\n \"acc_norm_stderr\": 0.05455069703232772\n\
\ },\n \"community|acva:Islam_Education|0\": {\n \"acc_norm\": 0.48205128205128206,\n\
\ \"acc_norm_stderr\": 0.035874770987738294\n },\n \"community|acva:Islam_branches_and_schools|0\"\
: {\n \"acc_norm\": 0.5028571428571429,\n \"acc_norm_stderr\": 0.03790428331834744\n\
\ },\n \"community|acva:Islamic_law_system|0\": {\n \"acc_norm\": 0.4564102564102564,\n\
\ \"acc_norm_stderr\": 0.035761230969912135\n },\n \"community|acva:Jordan|0\"\
: {\n \"acc_norm\": 0.35555555555555557,\n \"acc_norm_stderr\": 0.07216392363431012\n\
\ },\n \"community|acva:Kuwait|0\": {\n \"acc_norm\": 0.28888888888888886,\n\
\ \"acc_norm_stderr\": 0.06832943242540508\n },\n \"community|acva:Lebanon|0\"\
: {\n \"acc_norm\": 0.24444444444444444,\n \"acc_norm_stderr\": 0.06478835438717001\n\
\ },\n \"community|acva:Libya|0\": {\n \"acc_norm\": 0.5555555555555556,\n\
\ \"acc_norm_stderr\": 0.07491109582924915\n },\n \"community|acva:Mauritania|0\"\
: {\n \"acc_norm\": 0.4666666666666667,\n \"acc_norm_stderr\": 0.0752101433090355\n\
\ },\n \"community|acva:Mesopotamia_civilization|0\": {\n \"acc_norm\"\
: 0.567741935483871,\n \"acc_norm_stderr\": 0.03991964576535986\n },\n\
\ \"community|acva:Morocco|0\": {\n \"acc_norm\": 0.3111111111111111,\n\
\ \"acc_norm_stderr\": 0.0697920592732311\n },\n \"community|acva:Oman|0\"\
: {\n \"acc_norm\": 0.28888888888888886,\n \"acc_norm_stderr\": 0.06832943242540507\n\
\ },\n \"community|acva:Palestine|0\": {\n \"acc_norm\": 0.29411764705882354,\n\
\ \"acc_norm_stderr\": 0.04971495616050098\n },\n \"community|acva:Qatar|0\"\
: {\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.07385489458759965\n\
\ },\n \"community|acva:Saudi_Arabia|0\": {\n \"acc_norm\": 0.5897435897435898,\n\
\ \"acc_norm_stderr\": 0.03531493712326671\n },\n \"community|acva:Somalia|0\"\
: {\n \"acc_norm\": 0.4222222222222222,\n \"acc_norm_stderr\": 0.07446027270295806\n\
\ },\n \"community|acva:Sudan|0\": {\n \"acc_norm\": 0.35555555555555557,\n\
\ \"acc_norm_stderr\": 0.07216392363431012\n },\n \"community|acva:Syria|0\"\
: {\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.07491109582924914\n\
\ },\n \"community|acva:Tunisia|0\": {\n \"acc_norm\": 0.37777777777777777,\n\
\ \"acc_norm_stderr\": 0.0730911212732345\n },\n \"community|acva:United_Arab_Emirates|0\"\
: {\n \"acc_norm\": 0.3411764705882353,\n \"acc_norm_stderr\": 0.05172904297361926\n\
\ },\n \"community|acva:Yemen|0\": {\n \"acc_norm\": 0.2,\n \
\ \"acc_norm_stderr\": 0.13333333333333333\n },\n \"community|acva:communication|0\"\
: {\n \"acc_norm\": 0.489010989010989,\n \"acc_norm_stderr\": 0.02623685511821953\n\
\ },\n \"community|acva:computer_and_phone|0\": {\n \"acc_norm\": 0.44745762711864406,\n\
\ \"acc_norm_stderr\": 0.02899913786262446\n },\n \"community|acva:daily_life|0\"\
: {\n \"acc_norm\": 0.20178041543026706,\n \"acc_norm_stderr\": 0.02189429960731361\n\
\ },\n \"community|acva:entertainment|0\": {\n \"acc_norm\": 0.2677966101694915,\n\
\ \"acc_norm_stderr\": 0.025825262229488336\n },\n \"community|alghafa:mcq_exams_test_ar|0\"\
: {\n \"acc_norm\": 0.24236983842010773,\n \"acc_norm_stderr\": 0.018173162868385447\n\
\ },\n \"community|alghafa:meta_ar_dialects|0\": {\n \"acc_norm\":\
\ 0.26932344763670063,\n \"acc_norm_stderr\": 0.006040100948177848\n },\n\
\ \"community|alghafa:meta_ar_msa|0\": {\n \"acc_norm\": 0.2916201117318436,\n\
\ \"acc_norm_stderr\": 0.01520103251252042\n },\n \"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0\"\
: {\n \"acc_norm\": 0.5733333333333334,\n \"acc_norm_stderr\": 0.057495266811327224\n\
\ },\n \"community|alghafa:multiple_choice_grounded_statement_soqal_task|0\"\
: {\n \"acc_norm\": 0.37333333333333335,\n \"acc_norm_stderr\": 0.03962538976206637\n\
\ },\n \"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0\"\
: {\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.038618963090899454\n\
\ },\n \"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0\"\
: {\n \"acc_norm\": 0.6898061288305191,\n \"acc_norm_stderr\": 0.00517366207504383\n\
\ },\n \"community|alghafa:multiple_choice_rating_sentiment_task|0\": {\n\
\ \"acc_norm\": 0.4700583819849875,\n \"acc_norm_stderr\": 0.006446612189915484\n\
\ },\n \"community|alghafa:multiple_choice_sentiment_task|0\": {\n \
\ \"acc_norm\": 0.363953488372093,\n \"acc_norm_stderr\": 0.011604580414859268\n\
\ },\n \"community|arabic_exams|0\": {\n \"acc_norm\": 0.2681564245810056,\n\
\ \"acc_norm_stderr\": 0.01913466400732948\n },\n \"community|arabic_mmlu:abstract_algebra|0\"\
: {\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n\
\ },\n \"community|arabic_mmlu:anatomy|0\": {\n \"acc_norm\": 0.2814814814814815,\n\
\ \"acc_norm_stderr\": 0.038850042458002526\n },\n \"community|arabic_mmlu:astronomy|0\"\
: {\n \"acc_norm\": 0.2631578947368421,\n \"acc_norm_stderr\": 0.03583496176361063\n\
\ },\n \"community|arabic_mmlu:business_ethics|0\": {\n \"acc_norm\"\
: 0.38,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"community|arabic_mmlu:clinical_knowledge|0\"\
: {\n \"acc_norm\": 0.3169811320754717,\n \"acc_norm_stderr\": 0.028637235639800918\n\
\ },\n \"community|arabic_mmlu:college_biology|0\": {\n \"acc_norm\"\
: 0.2847222222222222,\n \"acc_norm_stderr\": 0.03773809990686936\n },\n\
\ \"community|arabic_mmlu:college_chemistry|0\": {\n \"acc_norm\": 0.24,\n\
\ \"acc_norm_stderr\": 0.04292346959909283\n },\n \"community|arabic_mmlu:college_computer_science|0\"\
: {\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n\
\ },\n \"community|arabic_mmlu:college_mathematics|0\": {\n \"acc_norm\"\
: 0.19,\n \"acc_norm_stderr\": 0.03942772444036623\n },\n \"community|arabic_mmlu:college_medicine|0\"\
: {\n \"acc_norm\": 0.27167630057803466,\n \"acc_norm_stderr\": 0.03391750322321659\n\
\ },\n \"community|arabic_mmlu:college_physics|0\": {\n \"acc_norm\"\
: 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237655\n },\n\
\ \"community|arabic_mmlu:computer_security|0\": {\n \"acc_norm\": 0.37,\n\
\ \"acc_norm_stderr\": 0.048523658709391\n },\n \"community|arabic_mmlu:conceptual_physics|0\"\
: {\n \"acc_norm\": 0.28085106382978725,\n \"acc_norm_stderr\": 0.02937917046412482\n\
\ },\n \"community|arabic_mmlu:econometrics|0\": {\n \"acc_norm\":\
\ 0.2894736842105263,\n \"acc_norm_stderr\": 0.04266339443159395\n },\n\
\ \"community|arabic_mmlu:electrical_engineering|0\": {\n \"acc_norm\"\
: 0.31724137931034485,\n \"acc_norm_stderr\": 0.038783523721386215\n },\n\
\ \"community|arabic_mmlu:elementary_mathematics|0\": {\n \"acc_norm\"\
: 0.30423280423280424,\n \"acc_norm_stderr\": 0.02369541500946309\n },\n\
\ \"community|arabic_mmlu:formal_logic|0\": {\n \"acc_norm\": 0.30158730158730157,\n\
\ \"acc_norm_stderr\": 0.04104947269903394\n },\n \"community|arabic_mmlu:global_facts|0\"\
: {\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n\
\ },\n \"community|arabic_mmlu:high_school_biology|0\": {\n \"acc_norm\"\
: 0.3,\n \"acc_norm_stderr\": 0.026069362295335144\n },\n \"community|arabic_mmlu:high_school_chemistry|0\"\
: {\n \"acc_norm\": 0.2512315270935961,\n \"acc_norm_stderr\": 0.030516530732694433\n\
\ },\n \"community|arabic_mmlu:high_school_computer_science|0\": {\n \
\ \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.044084400227680794\n \
\ },\n \"community|arabic_mmlu:high_school_european_history|0\": {\n \"\
acc_norm\": 0.21212121212121213,\n \"acc_norm_stderr\": 0.03192271569548299\n\
\ },\n \"community|arabic_mmlu:high_school_geography|0\": {\n \"acc_norm\"\
: 0.2676767676767677,\n \"acc_norm_stderr\": 0.03154449888270286\n },\n\
\ \"community|arabic_mmlu:high_school_government_and_politics|0\": {\n \
\ \"acc_norm\": 0.29533678756476683,\n \"acc_norm_stderr\": 0.03292296639155141\n\
\ },\n \"community|arabic_mmlu:high_school_macroeconomics|0\": {\n \
\ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.023234581088428498\n },\n\
\ \"community|arabic_mmlu:high_school_mathematics|0\": {\n \"acc_norm\"\
: 0.2851851851851852,\n \"acc_norm_stderr\": 0.027528599210340492\n },\n\
\ \"community|arabic_mmlu:high_school_microeconomics|0\": {\n \"acc_norm\"\
: 0.24369747899159663,\n \"acc_norm_stderr\": 0.027886828078380572\n },\n\
\ \"community|arabic_mmlu:high_school_physics|0\": {\n \"acc_norm\": 0.2913907284768212,\n\
\ \"acc_norm_stderr\": 0.03710185726119994\n },\n \"community|arabic_mmlu:high_school_psychology|0\"\
: {\n \"acc_norm\": 0.25688073394495414,\n \"acc_norm_stderr\": 0.01873249292834247\n\
\ },\n \"community|arabic_mmlu:high_school_statistics|0\": {\n \"acc_norm\"\
: 0.3101851851851852,\n \"acc_norm_stderr\": 0.03154696285656629\n },\n\
\ \"community|arabic_mmlu:high_school_us_history|0\": {\n \"acc_norm\"\
: 0.2647058823529412,\n \"acc_norm_stderr\": 0.030964517926923403\n },\n\
\ \"community|arabic_mmlu:high_school_world_history|0\": {\n \"acc_norm\"\
: 0.2911392405063291,\n \"acc_norm_stderr\": 0.02957160106575337\n },\n\
\ \"community|arabic_mmlu:human_aging|0\": {\n \"acc_norm\": 0.336322869955157,\n\
\ \"acc_norm_stderr\": 0.031708824268455005\n },\n \"community|arabic_mmlu:human_sexuality|0\"\
: {\n \"acc_norm\": 0.31297709923664124,\n \"acc_norm_stderr\": 0.04066962905677698\n\
\ },\n \"community|arabic_mmlu:international_law|0\": {\n \"acc_norm\"\
: 0.4297520661157025,\n \"acc_norm_stderr\": 0.04519082021319773\n },\n\
\ \"community|arabic_mmlu:jurisprudence|0\": {\n \"acc_norm\": 0.37037037037037035,\n\
\ \"acc_norm_stderr\": 0.04668408033024931\n },\n \"community|arabic_mmlu:logical_fallacies|0\"\
: {\n \"acc_norm\": 0.34355828220858897,\n \"acc_norm_stderr\": 0.03731133519673894\n\
\ },\n \"community|arabic_mmlu:machine_learning|0\": {\n \"acc_norm\"\
: 0.35714285714285715,\n \"acc_norm_stderr\": 0.04547960999764376\n },\n\
\ \"community|arabic_mmlu:management|0\": {\n \"acc_norm\": 0.2912621359223301,\n\
\ \"acc_norm_stderr\": 0.044986763205729224\n },\n \"community|arabic_mmlu:marketing|0\"\
: {\n \"acc_norm\": 0.4017094017094017,\n \"acc_norm_stderr\": 0.03211693751051621\n\
\ },\n \"community|arabic_mmlu:medical_genetics|0\": {\n \"acc_norm\"\
: 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"community|arabic_mmlu:miscellaneous|0\"\
: {\n \"acc_norm\": 0.3167305236270754,\n \"acc_norm_stderr\": 0.01663556642771249\n\
\ },\n \"community|arabic_mmlu:moral_disputes|0\": {\n \"acc_norm\"\
: 0.2976878612716763,\n \"acc_norm_stderr\": 0.024617055388676992\n },\n\
\ \"community|arabic_mmlu:moral_scenarios|0\": {\n \"acc_norm\": 0.26256983240223464,\n\
\ \"acc_norm_stderr\": 0.014716824273017756\n },\n \"community|arabic_mmlu:nutrition|0\"\
: {\n \"acc_norm\": 0.369281045751634,\n \"acc_norm_stderr\": 0.027634176689602656\n\
\ },\n \"community|arabic_mmlu:philosophy|0\": {\n \"acc_norm\": 0.36012861736334406,\n\
\ \"acc_norm_stderr\": 0.027264297599804015\n },\n \"community|arabic_mmlu:prehistory|0\"\
: {\n \"acc_norm\": 0.2808641975308642,\n \"acc_norm_stderr\": 0.025006469755799215\n\
\ },\n \"community|arabic_mmlu:professional_accounting|0\": {\n \"\
acc_norm\": 0.2872340425531915,\n \"acc_norm_stderr\": 0.026992199173064356\n\
\ },\n \"community|arabic_mmlu:professional_law|0\": {\n \"acc_norm\"\
: 0.2907431551499348,\n \"acc_norm_stderr\": 0.011598062372851983\n },\n\
\ \"community|arabic_mmlu:professional_medicine|0\": {\n \"acc_norm\"\
: 0.21691176470588236,\n \"acc_norm_stderr\": 0.025035845227711274\n },\n\
\ \"community|arabic_mmlu:professional_psychology|0\": {\n \"acc_norm\"\
: 0.27941176470588236,\n \"acc_norm_stderr\": 0.018152871051538812\n },\n\
\ \"community|arabic_mmlu:public_relations|0\": {\n \"acc_norm\": 0.3181818181818182,\n\
\ \"acc_norm_stderr\": 0.04461272175910508\n },\n \"community|arabic_mmlu:security_studies|0\"\
: {\n \"acc_norm\": 0.27755102040816326,\n \"acc_norm_stderr\": 0.028666857790274645\n\
\ },\n \"community|arabic_mmlu:sociology|0\": {\n \"acc_norm\": 0.3482587064676617,\n\
\ \"acc_norm_stderr\": 0.033687874661154596\n },\n \"community|arabic_mmlu:us_foreign_policy|0\"\
: {\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n\
\ },\n \"community|arabic_mmlu:virology|0\": {\n \"acc_norm\": 0.28313253012048195,\n\
\ \"acc_norm_stderr\": 0.03507295431370519\n },\n \"community|arabic_mmlu:world_religions|0\"\
: {\n \"acc_norm\": 0.2807017543859649,\n \"acc_norm_stderr\": 0.034462962170884265\n\
\ },\n \"community|arc_challenge_okapi_ar|0\": {\n \"acc_norm\": 0.28706896551724137,\n\
\ \"acc_norm_stderr\": 0.013288475631250104\n },\n \"community|arc_easy_ar|0\"\
: {\n \"acc_norm\": 0.28553299492385786,\n \"acc_norm_stderr\": 0.009291533096115192\n\
\ },\n \"community|boolq_ar|0\": {\n \"acc_norm\": 0.6435582822085889,\n\
\ \"acc_norm_stderr\": 0.008389692158991753\n },\n \"community|copa_ext_ar|0\"\
: {\n \"acc_norm\": 0.4666666666666667,\n \"acc_norm_stderr\": 0.05288198530254015\n\
\ },\n \"community|hellaswag_okapi_ar|0\": {\n \"acc_norm\": 0.2527532439210555,\n\
\ \"acc_norm_stderr\": 0.004538326326529228\n },\n \"community|openbook_qa_ext_ar|0\"\
: {\n \"acc_norm\": 0.3616161616161616,\n \"acc_norm_stderr\": 0.021617296703087114\n\
\ },\n \"community|piqa_ar|0\": {\n \"acc_norm\": 0.5117294053464266,\n\
\ \"acc_norm_stderr\": 0.011678518074484457\n },\n \"community|race_ar|0\"\
: {\n \"acc_norm\": 0.32460945425035503,\n \"acc_norm_stderr\": 0.006669952193906699\n\
\ },\n \"community|sciq_ar|0\": {\n \"acc_norm\": 0.4904522613065327,\n\
\ \"acc_norm_stderr\": 0.01585614533089209\n },\n \"community|toxigen_ar|0\"\
: {\n \"acc_norm\": 0.39572192513368987,\n \"acc_norm_stderr\": 0.016000738844764214\n\
\ },\n \"lighteval|xstory_cloze:ar|0\": {\n \"acc\": 0.5221707478491066,\n\
\ \"acc_stderr\": 0.01285446962593609\n },\n \"community|acva:_average|0\"\
: {\n \"acc_norm\": 0.44487603250172847,\n \"acc_norm_stderr\": 0.04710953548865365\n\
\ },\n \"community|alghafa:_average|0\": {\n \"acc_norm\": 0.4007923774418057,\n\
\ \"acc_norm_stderr\": 0.02204208563035504\n },\n \"community|arabic_mmlu:_average|0\"\
: {\n \"acc_norm\": 0.29928291202036555,\n \"acc_norm_stderr\": 0.034141511310792134\n\
\ }\n}\n```"
repo_url: https://huggingface.co/Syed-Hasan-8503/Phi-3-mini-4K-instruct-cpo-simpo
configs:
- config_name: community_acva_Algeria_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Algeria|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Algeria|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Ancient_Egypt_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Ancient_Egypt|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Ancient_Egypt|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Arab_Empire_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Arab_Empire|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Arab_Empire|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Arabic_Architecture_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Arabic_Architecture|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Arabic_Architecture|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Arabic_Art_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Arabic_Art|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Arabic_Art|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Arabic_Astronomy_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Arabic_Astronomy|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Arabic_Astronomy|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Arabic_Calligraphy_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Arabic_Calligraphy|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Arabic_Calligraphy|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Arabic_Ceremony_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Arabic_Ceremony|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Arabic_Ceremony|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Arabic_Clothing_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Arabic_Clothing|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Arabic_Clothing|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Arabic_Culture_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Arabic_Culture|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Arabic_Culture|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Arabic_Food_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Arabic_Food|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Arabic_Food|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Arabic_Funeral_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Arabic_Funeral|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Arabic_Funeral|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Arabic_Geography_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Arabic_Geography|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Arabic_Geography|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Arabic_History_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Arabic_History|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Arabic_History|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Arabic_Language_Origin_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Arabic_Language_Origin|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Arabic_Language_Origin|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Arabic_Literature_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Arabic_Literature|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Arabic_Literature|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Arabic_Math_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Arabic_Math|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Arabic_Math|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Arabic_Medicine_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Arabic_Medicine|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Arabic_Medicine|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Arabic_Music_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Arabic_Music|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Arabic_Music|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Arabic_Ornament_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Arabic_Ornament|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Arabic_Ornament|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Arabic_Philosophy_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Arabic_Philosophy|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Arabic_Philosophy|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Arabic_Physics_and_Chemistry_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Arabic_Physics_and_Chemistry|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Arabic_Physics_and_Chemistry|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Arabic_Wedding_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Arabic_Wedding|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Arabic_Wedding|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Bahrain_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Bahrain|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Bahrain|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Comoros_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Comoros|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Comoros|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Egypt_modern_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Egypt_modern|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Egypt_modern|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_InfluenceFromAncientEgypt_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:InfluenceFromAncientEgypt|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:InfluenceFromAncientEgypt|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_InfluenceFromByzantium_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:InfluenceFromByzantium|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:InfluenceFromByzantium|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_InfluenceFromChina_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:InfluenceFromChina|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:InfluenceFromChina|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_InfluenceFromGreece_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:InfluenceFromGreece|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:InfluenceFromGreece|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_InfluenceFromIslam_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:InfluenceFromIslam|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:InfluenceFromIslam|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_InfluenceFromPersia_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:InfluenceFromPersia|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:InfluenceFromPersia|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_InfluenceFromRome_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:InfluenceFromRome|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:InfluenceFromRome|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Iraq_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Iraq|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Iraq|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Islam_Education_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Islam_Education|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Islam_Education|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Islam_branches_and_schools_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Islam_branches_and_schools|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Islam_branches_and_schools|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Islamic_law_system_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Islamic_law_system|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Islamic_law_system|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Jordan_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Jordan|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Jordan|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Kuwait_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Kuwait|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Kuwait|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Lebanon_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Lebanon|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Lebanon|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Libya_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Libya|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Libya|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Mauritania_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Mauritania|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Mauritania|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Mesopotamia_civilization_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Mesopotamia_civilization|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Mesopotamia_civilization|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Morocco_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Morocco|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Morocco|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Oman_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Oman|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Oman|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Palestine_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Palestine|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Palestine|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Qatar_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Qatar|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Qatar|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Saudi_Arabia_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Saudi_Arabia|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Saudi_Arabia|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Somalia_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Somalia|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Somalia|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Sudan_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Sudan|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Sudan|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Syria_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Syria|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Syria|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Tunisia_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Tunisia|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Tunisia|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_United_Arab_Emirates_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:United_Arab_Emirates|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:United_Arab_Emirates|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_Yemen_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:Yemen|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:Yemen|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_communication_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:communication|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:communication|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_computer_and_phone_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:computer_and_phone|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:computer_and_phone|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_daily_life_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:daily_life|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:daily_life|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_acva_entertainment_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|acva:entertainment|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|acva:entertainment|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_alghafa_mcq_exams_test_ar_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|alghafa:mcq_exams_test_ar|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|alghafa:mcq_exams_test_ar|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_alghafa_meta_ar_dialects_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|alghafa:meta_ar_dialects|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|alghafa:meta_ar_dialects|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_alghafa_meta_ar_msa_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|alghafa:meta_ar_msa|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|alghafa:meta_ar_msa|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_alghafa_multiple_choice_facts_truefalse_balanced_task_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|alghafa:multiple_choice_facts_truefalse_balanced_task|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|alghafa:multiple_choice_facts_truefalse_balanced_task|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_alghafa_multiple_choice_grounded_statement_soqal_task_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|alghafa:multiple_choice_grounded_statement_soqal_task|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|alghafa:multiple_choice_grounded_statement_soqal_task|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_alghafa_multiple_choice_grounded_statement_xglue_mlqa_task_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_alghafa_multiple_choice_rating_sentiment_no_neutral_task_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_alghafa_multiple_choice_rating_sentiment_task_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|alghafa:multiple_choice_rating_sentiment_task|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|alghafa:multiple_choice_rating_sentiment_task|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_alghafa_multiple_choice_sentiment_task_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|alghafa:multiple_choice_sentiment_task|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|alghafa:multiple_choice_sentiment_task|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_exams_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_exams|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_exams|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_abstract_algebra_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:abstract_algebra|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:abstract_algebra|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_anatomy_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:anatomy|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:anatomy|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_astronomy_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:astronomy|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:astronomy|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_business_ethics_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:business_ethics|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:business_ethics|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_clinical_knowledge_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:clinical_knowledge|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:clinical_knowledge|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_college_biology_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:college_biology|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:college_biology|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_college_chemistry_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:college_chemistry|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:college_chemistry|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_college_computer_science_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:college_computer_science|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:college_computer_science|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_college_mathematics_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:college_mathematics|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:college_mathematics|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_college_medicine_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:college_medicine|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:college_medicine|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_college_physics_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:college_physics|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:college_physics|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_computer_security_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:computer_security|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:computer_security|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_conceptual_physics_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:conceptual_physics|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:conceptual_physics|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_econometrics_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:econometrics|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:econometrics|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_electrical_engineering_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:electrical_engineering|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:electrical_engineering|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_elementary_mathematics_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:elementary_mathematics|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:elementary_mathematics|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_formal_logic_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:formal_logic|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:formal_logic|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_global_facts_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:global_facts|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:global_facts|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_high_school_biology_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:high_school_biology|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:high_school_biology|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_high_school_chemistry_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:high_school_chemistry|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:high_school_chemistry|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_high_school_computer_science_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:high_school_computer_science|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:high_school_computer_science|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_high_school_european_history_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:high_school_european_history|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:high_school_european_history|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_high_school_geography_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:high_school_geography|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:high_school_geography|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_high_school_government_and_politics_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:high_school_government_and_politics|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:high_school_government_and_politics|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_high_school_macroeconomics_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:high_school_macroeconomics|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:high_school_macroeconomics|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_high_school_mathematics_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:high_school_mathematics|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:high_school_mathematics|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_high_school_microeconomics_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:high_school_microeconomics|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:high_school_microeconomics|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_high_school_physics_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:high_school_physics|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:high_school_physics|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_high_school_psychology_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:high_school_psychology|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:high_school_psychology|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_high_school_statistics_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:high_school_statistics|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:high_school_statistics|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_high_school_us_history_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:high_school_us_history|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:high_school_us_history|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_high_school_world_history_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:high_school_world_history|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:high_school_world_history|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_human_aging_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:human_aging|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:human_aging|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_human_sexuality_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:human_sexuality|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:human_sexuality|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_international_law_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:international_law|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:international_law|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_jurisprudence_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:jurisprudence|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:jurisprudence|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_logical_fallacies_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:logical_fallacies|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:logical_fallacies|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_machine_learning_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:machine_learning|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:machine_learning|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_management_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:management|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:management|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_marketing_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:marketing|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:marketing|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_medical_genetics_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:medical_genetics|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:medical_genetics|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_miscellaneous_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:miscellaneous|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:miscellaneous|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_moral_disputes_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:moral_disputes|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:moral_disputes|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_moral_scenarios_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:moral_scenarios|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:moral_scenarios|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_nutrition_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:nutrition|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:nutrition|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_philosophy_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:philosophy|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:philosophy|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_prehistory_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:prehistory|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:prehistory|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_professional_accounting_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:professional_accounting|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:professional_accounting|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_professional_law_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:professional_law|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:professional_law|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_professional_medicine_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:professional_medicine|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:professional_medicine|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_professional_psychology_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:professional_psychology|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:professional_psychology|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_public_relations_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:public_relations|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:public_relations|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_security_studies_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:security_studies|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:security_studies|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_sociology_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:sociology|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:sociology|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_us_foreign_policy_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:us_foreign_policy|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:us_foreign_policy|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_virology_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:virology|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:virology|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arabic_mmlu_world_religions_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arabic_mmlu:world_religions|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arabic_mmlu:world_religions|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arc_challenge_okapi_ar_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arc_challenge_okapi_ar|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arc_challenge_okapi_ar|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_arc_easy_ar_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|arc_easy_ar|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|arc_easy_ar|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_boolq_ar_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|boolq_ar|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|boolq_ar|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_copa_ext_ar_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|copa_ext_ar|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|copa_ext_ar|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_hellaswag_okapi_ar_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|hellaswag_okapi_ar|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|hellaswag_okapi_ar|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_openbook_qa_ext_ar_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|openbook_qa_ext_ar|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|openbook_qa_ext_ar|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_piqa_ar_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|piqa_ar|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|piqa_ar|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_race_ar_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|race_ar|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|race_ar|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_sciq_ar_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|sciq_ar|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|sciq_ar|0_2024-09-29T18-11-33.864987.parquet'
- config_name: community_toxigen_ar_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_community|toxigen_ar|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_community|toxigen_ar|0_2024-09-29T18-11-33.864987.parquet'
- config_name: lighteval_xstory_cloze_ar_0
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- '**/details_lighteval|xstory_cloze:ar|0_2024-09-29T18-11-33.864987.parquet'
- split: latest
path:
- '**/details_lighteval|xstory_cloze:ar|0_2024-09-29T18-11-33.864987.parquet'
- config_name: results
data_files:
- split: 2024_09_29T18_11_33.864987
path:
- results_2024-09-29T18-11-33.864987.parquet
- split: latest
path:
- results_2024-09-29T18-11-33.864987.parquet
---
# Dataset Card for Evaluation run of Syed-Hasan-8503/Phi-3-mini-4K-instruct-cpo-simpo
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [Syed-Hasan-8503/Phi-3-mini-4K-instruct-cpo-simpo](https://huggingface.co/Syed-Hasan-8503/Phi-3-mini-4K-instruct-cpo-simpo).
The dataset is composed of 136 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the most recent results.

An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("OALL/details_Syed-Hasan-8503__Phi-3-mini-4K-instruct-cpo-simpo",
"lighteval_xstory_cloze_ar_0",
split="train")
```
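If you only need the aggregated scores rather than the per-sample details, you can load the "results" configuration instead. A minimal sketch (the configuration and split names below are the ones listed in this card's YAML header):

```python
from datasets import load_dataset

# Load the aggregated scores of the latest run; "results" is the
# dedicated configuration defined in this card's YAML header, and
# "latest" is the split that tracks the most recent evaluation.
results = load_dataset(
    "OALL/details_Syed-Hasan-8503__Phi-3-mini-4K-instruct-cpo-simpo",
    "results",
    split="latest",
)
```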
## Latest results
These are the [latest results from run 2024-09-29T18:11:33.864987](https://huggingface.co/datasets/OALL/details_Syed-Hasan-8503__Phi-3-mini-4K-instruct-cpo-simpo/blob/main/results_2024-09-29T18-11-33.864987.json) (note that there might be results for other tasks in the repository if successive evals didn't cover the same tasks. You can find each one in the "results" configuration and in the "latest" split of each eval):
```json
{
"all": {
"acc_norm": 0.375977281871918,
"acc_norm_stderr": 0.03745292815851962,
"acc": 0.5221707478491066,
"acc_stderr": 0.01285446962593609
},
"community|acva:Algeria|0": {
"acc_norm": 0.4256410256410256,
"acc_norm_stderr": 0.03549871080367707
},
"community|acva:Ancient_Egypt|0": {
"acc_norm": 0.2761904761904762,
"acc_norm_stderr": 0.02523199976751755
},
"community|acva:Arab_Empire|0": {
"acc_norm": 0.6188679245283019,
"acc_norm_stderr": 0.029890609686286623
},
"community|acva:Arabic_Architecture|0": {
"acc_norm": 0.4,
"acc_norm_stderr": 0.0351726229056329
},
"community|acva:Arabic_Art|0": {
"acc_norm": 0.3384615384615385,
"acc_norm_stderr": 0.03397280032734095
},
"community|acva:Arabic_Astronomy|0": {
"acc_norm": 0.4666666666666667,
"acc_norm_stderr": 0.03581804596782233
},
"community|acva:Arabic_Calligraphy|0": {
"acc_norm": 0.6509803921568628,
"acc_norm_stderr": 0.029908319306125593
},
"community|acva:Arabic_Ceremony|0": {
"acc_norm": 0.5567567567567567,
"acc_norm_stderr": 0.03662223951330472
},
"community|acva:Arabic_Clothing|0": {
"acc_norm": 0.5128205128205128,
"acc_norm_stderr": 0.03588610523192215
},
"community|acva:Arabic_Culture|0": {
"acc_norm": 0.3384615384615385,
"acc_norm_stderr": 0.033972800327340964
},
"community|acva:Arabic_Food|0": {
"acc_norm": 0.5487179487179488,
"acc_norm_stderr": 0.035727098603183925
},
"community|acva:Arabic_Funeral|0": {
"acc_norm": 0.3368421052631579,
"acc_norm_stderr": 0.04874810431502903
},
"community|acva:Arabic_Geography|0": {
"acc_norm": 0.593103448275862,
"acc_norm_stderr": 0.04093793981266236
},
"community|acva:Arabic_History|0": {
"acc_norm": 0.4153846153846154,
"acc_norm_stderr": 0.03538013280575029
},
"community|acva:Arabic_Language_Origin|0": {
"acc_norm": 0.5578947368421052,
"acc_norm_stderr": 0.051224183891818126
},
"community|acva:Arabic_Literature|0": {
"acc_norm": 0.503448275862069,
"acc_norm_stderr": 0.04166567577101579
},
"community|acva:Arabic_Math|0": {
"acc_norm": 0.2923076923076923,
"acc_norm_stderr": 0.0326543839374951
},
"community|acva:Arabic_Medicine|0": {
"acc_norm": 0.5103448275862069,
"acc_norm_stderr": 0.04165774775728763
},
"community|acva:Arabic_Music|0": {
"acc_norm": 0.2446043165467626,
"acc_norm_stderr": 0.03659146222520568
},
"community|acva:Arabic_Ornament|0": {
"acc_norm": 0.6153846153846154,
"acc_norm_stderr": 0.03492896993742303
},
"community|acva:Arabic_Philosophy|0": {
"acc_norm": 0.593103448275862,
"acc_norm_stderr": 0.04093793981266237
},
"community|acva:Arabic_Physics_and_Chemistry|0": {
"acc_norm": 0.6,
"acc_norm_stderr": 0.03517262290563288
},
"community|acva:Arabic_Wedding|0": {
"acc_norm": 0.40512820512820513,
"acc_norm_stderr": 0.03524577495610962
},
"community|acva:Bahrain|0": {
"acc_norm": 0.37777777777777777,
"acc_norm_stderr": 0.0730911212732345
},
"community|acva:Comoros|0": {
"acc_norm": 0.4888888888888889,
"acc_norm_stderr": 0.07535922203472523
},
"community|acva:Egypt_modern|0": {
"acc_norm": 0.42105263157894735,
"acc_norm_stderr": 0.050924152299673286
},
"community|acva:InfluenceFromAncientEgypt|0": {
"acc_norm": 0.558974358974359,
"acc_norm_stderr": 0.0356473293185358
},
"community|acva:InfluenceFromByzantium|0": {
"acc_norm": 0.7103448275862069,
"acc_norm_stderr": 0.03780019230438015
},
"community|acva:InfluenceFromChina|0": {
"acc_norm": 0.2717948717948718,
"acc_norm_stderr": 0.031940861870257235
},
"community|acva:InfluenceFromGreece|0": {
"acc_norm": 0.6256410256410256,
"acc_norm_stderr": 0.03474608430626236
},
"community|acva:InfluenceFromIslam|0": {
"acc_norm": 0.41379310344827586,
"acc_norm_stderr": 0.04104269211806232
},
"community|acva:InfluenceFromPersia|0": {
"acc_norm": 0.7085714285714285,
"acc_norm_stderr": 0.03444952656229018
},
"community|acva:InfluenceFromRome|0": {
"acc_norm": 0.5794871794871795,
"acc_norm_stderr": 0.035441383893034833
},
"community|acva:Iraq|0": {
"acc_norm": 0.49411764705882355,
"acc_norm_stderr": 0.05455069703232772
},
"community|acva:Islam_Education|0": {
"acc_norm": 0.48205128205128206,
"acc_norm_stderr": 0.035874770987738294
},
"community|acva:Islam_branches_and_schools|0": {
"acc_norm": 0.5028571428571429,
"acc_norm_stderr": 0.03790428331834744
},
"community|acva:Islamic_law_system|0": {
"acc_norm": 0.4564102564102564,
"acc_norm_stderr": 0.035761230969912135
},
"community|acva:Jordan|0": {
"acc_norm": 0.35555555555555557,
"acc_norm_stderr": 0.07216392363431012
},
"community|acva:Kuwait|0": {
"acc_norm": 0.28888888888888886,
"acc_norm_stderr": 0.06832943242540508
},
"community|acva:Lebanon|0": {
"acc_norm": 0.24444444444444444,
"acc_norm_stderr": 0.06478835438717001
},
"community|acva:Libya|0": {
"acc_norm": 0.5555555555555556,
"acc_norm_stderr": 0.07491109582924915
},
"community|acva:Mauritania|0": {
"acc_norm": 0.4666666666666667,
"acc_norm_stderr": 0.0752101433090355
},
"community|acva:Mesopotamia_civilization|0": {
"acc_norm": 0.567741935483871,
"acc_norm_stderr": 0.03991964576535986
},
"community|acva:Morocco|0": {
"acc_norm": 0.3111111111111111,
"acc_norm_stderr": 0.0697920592732311
},
"community|acva:Oman|0": {
"acc_norm": 0.28888888888888886,
"acc_norm_stderr": 0.06832943242540507
},
"community|acva:Palestine|0": {
"acc_norm": 0.29411764705882354,
"acc_norm_stderr": 0.04971495616050098
},
"community|acva:Qatar|0": {
"acc_norm": 0.4,
"acc_norm_stderr": 0.07385489458759965
},
"community|acva:Saudi_Arabia|0": {
"acc_norm": 0.5897435897435898,
"acc_norm_stderr": 0.03531493712326671
},
"community|acva:Somalia|0": {
"acc_norm": 0.4222222222222222,
"acc_norm_stderr": 0.07446027270295806
},
"community|acva:Sudan|0": {
"acc_norm": 0.35555555555555557,
"acc_norm_stderr": 0.07216392363431012
},
"community|acva:Syria|0": {
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.07491109582924914
},
"community|acva:Tunisia|0": {
"acc_norm": 0.37777777777777777,
"acc_norm_stderr": 0.0730911212732345
},
"community|acva:United_Arab_Emirates|0": {
"acc_norm": 0.3411764705882353,
"acc_norm_stderr": 0.05172904297361926
},
"community|acva:Yemen|0": {
"acc_norm": 0.2,
"acc_norm_stderr": 0.13333333333333333
},
"community|acva:communication|0": {
"acc_norm": 0.489010989010989,
"acc_norm_stderr": 0.02623685511821953
},
"community|acva:computer_and_phone|0": {
"acc_norm": 0.44745762711864406,
"acc_norm_stderr": 0.02899913786262446
},
"community|acva:daily_life|0": {
"acc_norm": 0.20178041543026706,
"acc_norm_stderr": 0.02189429960731361
},
"community|acva:entertainment|0": {
"acc_norm": 0.2677966101694915,
"acc_norm_stderr": 0.025825262229488336
},
"community|alghafa:mcq_exams_test_ar|0": {
"acc_norm": 0.24236983842010773,
"acc_norm_stderr": 0.018173162868385447
},
"community|alghafa:meta_ar_dialects|0": {
"acc_norm": 0.26932344763670063,
"acc_norm_stderr": 0.006040100948177848
},
"community|alghafa:meta_ar_msa|0": {
"acc_norm": 0.2916201117318436,
"acc_norm_stderr": 0.01520103251252042
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": {
"acc_norm": 0.5733333333333334,
"acc_norm_stderr": 0.057495266811327224
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": {
"acc_norm": 0.37333333333333335,
"acc_norm_stderr": 0.03962538976206637
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": {
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.038618963090899454
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": {
"acc_norm": 0.6898061288305191,
"acc_norm_stderr": 0.00517366207504383
},
"community|alghafa:multiple_choice_rating_sentiment_task|0": {
"acc_norm": 0.4700583819849875,
"acc_norm_stderr": 0.006446612189915484
},
"community|alghafa:multiple_choice_sentiment_task|0": {
"acc_norm": 0.363953488372093,
"acc_norm_stderr": 0.011604580414859268
},
"community|arabic_exams|0": {
"acc_norm": 0.2681564245810056,
"acc_norm_stderr": 0.01913466400732948
},
"community|arabic_mmlu:abstract_algebra|0": {
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"community|arabic_mmlu:anatomy|0": {
"acc_norm": 0.2814814814814815,
"acc_norm_stderr": 0.038850042458002526
},
"community|arabic_mmlu:astronomy|0": {
"acc_norm": 0.2631578947368421,
"acc_norm_stderr": 0.03583496176361063
},
"community|arabic_mmlu:business_ethics|0": {
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145632
},
"community|arabic_mmlu:clinical_knowledge|0": {
"acc_norm": 0.3169811320754717,
"acc_norm_stderr": 0.028637235639800918
},
"community|arabic_mmlu:college_biology|0": {
"acc_norm": 0.2847222222222222,
"acc_norm_stderr": 0.03773809990686936
},
"community|arabic_mmlu:college_chemistry|0": {
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909283
},
"community|arabic_mmlu:college_computer_science|0": {
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"community|arabic_mmlu:college_mathematics|0": {
"acc_norm": 0.19,
"acc_norm_stderr": 0.03942772444036623
},
"community|arabic_mmlu:college_medicine|0": {
"acc_norm": 0.27167630057803466,
"acc_norm_stderr": 0.03391750322321659
},
"community|arabic_mmlu:college_physics|0": {
"acc_norm": 0.21568627450980393,
"acc_norm_stderr": 0.04092563958237655
},
"community|arabic_mmlu:computer_security|0": {
"acc_norm": 0.37,
"acc_norm_stderr": 0.048523658709391
},
"community|arabic_mmlu:conceptual_physics|0": {
"acc_norm": 0.28085106382978725,
"acc_norm_stderr": 0.02937917046412482
},
"community|arabic_mmlu:econometrics|0": {
"acc_norm": 0.2894736842105263,
"acc_norm_stderr": 0.04266339443159395
},
"community|arabic_mmlu:electrical_engineering|0": {
"acc_norm": 0.31724137931034485,
"acc_norm_stderr": 0.038783523721386215
},
"community|arabic_mmlu:elementary_mathematics|0": {
"acc_norm": 0.30423280423280424,
"acc_norm_stderr": 0.02369541500946309
},
"community|arabic_mmlu:formal_logic|0": {
"acc_norm": 0.30158730158730157,
"acc_norm_stderr": 0.04104947269903394
},
"community|arabic_mmlu:global_facts|0": {
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"community|arabic_mmlu:high_school_biology|0": {
"acc_norm": 0.3,
"acc_norm_stderr": 0.026069362295335144
},
"community|arabic_mmlu:high_school_chemistry|0": {
"acc_norm": 0.2512315270935961,
"acc_norm_stderr": 0.030516530732694433
},
"community|arabic_mmlu:high_school_computer_science|0": {
"acc_norm": 0.26,
"acc_norm_stderr": 0.044084400227680794
},
"community|arabic_mmlu:high_school_european_history|0": {
"acc_norm": 0.21212121212121213,
"acc_norm_stderr": 0.03192271569548299
},
"community|arabic_mmlu:high_school_geography|0": {
"acc_norm": 0.2676767676767677,
"acc_norm_stderr": 0.03154449888270286
},
"community|arabic_mmlu:high_school_government_and_politics|0": {
"acc_norm": 0.29533678756476683,
"acc_norm_stderr": 0.03292296639155141
},
"community|arabic_mmlu:high_school_macroeconomics|0": {
"acc_norm": 0.3,
"acc_norm_stderr": 0.023234581088428498
},
"community|arabic_mmlu:high_school_mathematics|0": {
"acc_norm": 0.2851851851851852,
"acc_norm_stderr": 0.027528599210340492
},
"community|arabic_mmlu:high_school_microeconomics|0": {
"acc_norm": 0.24369747899159663,
"acc_norm_stderr": 0.027886828078380572
},
"community|arabic_mmlu:high_school_physics|0": {
"acc_norm": 0.2913907284768212,
"acc_norm_stderr": 0.03710185726119994
},
"community|arabic_mmlu:high_school_psychology|0": {
"acc_norm": 0.25688073394495414,
"acc_norm_stderr": 0.01873249292834247
},
"community|arabic_mmlu:high_school_statistics|0": {
"acc_norm": 0.3101851851851852,
"acc_norm_stderr": 0.03154696285656629
},
"community|arabic_mmlu:high_school_us_history|0": {
"acc_norm": 0.2647058823529412,
"acc_norm_stderr": 0.030964517926923403
},
"community|arabic_mmlu:high_school_world_history|0": {
"acc_norm": 0.2911392405063291,
"acc_norm_stderr": 0.02957160106575337
},
"community|arabic_mmlu:human_aging|0": {
"acc_norm": 0.336322869955157,
"acc_norm_stderr": 0.031708824268455005
},
"community|arabic_mmlu:human_sexuality|0": {
"acc_norm": 0.31297709923664124,
"acc_norm_stderr": 0.04066962905677698
},
"community|arabic_mmlu:international_law|0": {
"acc_norm": 0.4297520661157025,
"acc_norm_stderr": 0.04519082021319773
},
"community|arabic_mmlu:jurisprudence|0": {
"acc_norm": 0.37037037037037035,
"acc_norm_stderr": 0.04668408033024931
},
"community|arabic_mmlu:logical_fallacies|0": {
"acc_norm": 0.34355828220858897,
"acc_norm_stderr": 0.03731133519673894
},
"community|arabic_mmlu:machine_learning|0": {
"acc_norm": 0.35714285714285715,
"acc_norm_stderr": 0.04547960999764376
},
"community|arabic_mmlu:management|0": {
"acc_norm": 0.2912621359223301,
"acc_norm_stderr": 0.044986763205729224
},
"community|arabic_mmlu:marketing|0": {
"acc_norm": 0.4017094017094017,
"acc_norm_stderr": 0.03211693751051621
},
"community|arabic_mmlu:medical_genetics|0": {
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"community|arabic_mmlu:miscellaneous|0": {
"acc_norm": 0.3167305236270754,
"acc_norm_stderr": 0.01663556642771249
},
"community|arabic_mmlu:moral_disputes|0": {
"acc_norm": 0.2976878612716763,
"acc_norm_stderr": 0.024617055388676992
},
"community|arabic_mmlu:moral_scenarios|0": {
"acc_norm": 0.26256983240223464,
"acc_norm_stderr": 0.014716824273017756
},
"community|arabic_mmlu:nutrition|0": {
"acc_norm": 0.369281045751634,
"acc_norm_stderr": 0.027634176689602656
},
"community|arabic_mmlu:philosophy|0": {
"acc_norm": 0.36012861736334406,
"acc_norm_stderr": 0.027264297599804015
},
"community|arabic_mmlu:prehistory|0": {
"acc_norm": 0.2808641975308642,
"acc_norm_stderr": 0.025006469755799215
},
"community|arabic_mmlu:professional_accounting|0": {
"acc_norm": 0.2872340425531915,
"acc_norm_stderr": 0.026992199173064356
},
"community|arabic_mmlu:professional_law|0": {
"acc_norm": 0.2907431551499348,
"acc_norm_stderr": 0.011598062372851983
},
"community|arabic_mmlu:professional_medicine|0": {
"acc_norm": 0.21691176470588236,
"acc_norm_stderr": 0.025035845227711274
},
"community|arabic_mmlu:professional_psychology|0": {
"acc_norm": 0.27941176470588236,
"acc_norm_stderr": 0.018152871051538812
},
"community|arabic_mmlu:public_relations|0": {
"acc_norm": 0.3181818181818182,
"acc_norm_stderr": 0.04461272175910508
},
"community|arabic_mmlu:security_studies|0": {
"acc_norm": 0.27755102040816326,
"acc_norm_stderr": 0.028666857790274645
},
"community|arabic_mmlu:sociology|0": {
"acc_norm": 0.3482587064676617,
"acc_norm_stderr": 0.033687874661154596
},
"community|arabic_mmlu:us_foreign_policy|0": {
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"community|arabic_mmlu:virology|0": {
"acc_norm": 0.28313253012048195,
"acc_norm_stderr": 0.03507295431370519
},
"community|arabic_mmlu:world_religions|0": {
"acc_norm": 0.2807017543859649,
"acc_norm_stderr": 0.034462962170884265
},
"community|arc_challenge_okapi_ar|0": {
"acc_norm": 0.28706896551724137,
"acc_norm_stderr": 0.013288475631250104
},
"community|arc_easy_ar|0": {
"acc_norm": 0.28553299492385786,
"acc_norm_stderr": 0.009291533096115192
},
"community|boolq_ar|0": {
"acc_norm": 0.6435582822085889,
"acc_norm_stderr": 0.008389692158991753
},
"community|copa_ext_ar|0": {
"acc_norm": 0.4666666666666667,
"acc_norm_stderr": 0.05288198530254015
},
"community|hellaswag_okapi_ar|0": {
"acc_norm": 0.2527532439210555,
"acc_norm_stderr": 0.004538326326529228
},
"community|openbook_qa_ext_ar|0": {
"acc_norm": 0.3616161616161616,
"acc_norm_stderr": 0.021617296703087114
},
"community|piqa_ar|0": {
"acc_norm": 0.5117294053464266,
"acc_norm_stderr": 0.011678518074484457
},
"community|race_ar|0": {
"acc_norm": 0.32460945425035503,
"acc_norm_stderr": 0.006669952193906699
},
"community|sciq_ar|0": {
"acc_norm": 0.4904522613065327,
"acc_norm_stderr": 0.01585614533089209
},
"community|toxigen_ar|0": {
"acc_norm": 0.39572192513368987,
"acc_norm_stderr": 0.016000738844764214
},
"lighteval|xstory_cloze:ar|0": {
"acc": 0.5221707478491066,
"acc_stderr": 0.01285446962593609
},
"community|acva:_average|0": {
"acc_norm": 0.44487603250172847,
"acc_norm_stderr": 0.04710953548865365
},
"community|alghafa:_average|0": {
"acc_norm": 0.4007923774418057,
"acc_norm_stderr": 0.02204208563035504
},
"community|arabic_mmlu:_average|0": {
"acc_norm": 0.29928291202036555,
"acc_norm_stderr": 0.034141511310792134
}
}
```
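As a quick way to work with these numbers, the per-task entries above can be aggregated straight from the results file. This is a minimal sketch, assuming the downloaded JSON mirrors the dict printed above; the filename below is a placeholder for this run's actual `results_*.json` file.
```python
import json

# Placeholder path: substitute the actual results_*.json file of this run.
with open("results.json") as f:
    results = json.load(f)

# Collect acc_norm per task (the "_average" entries are included here too)
# and report a simple macro-average across them.
acc_norms = {task: m["acc_norm"] for task, m in results.items() if "acc_norm" in m}
print(f"{len(acc_norms)} tasks with acc_norm")
print(f"macro-average acc_norm: {sum(acc_norms.values()) / len(acc_norms):.4f}")
```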
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
karuna-bhaila/adult_income | karuna-bhaila | "2024-09-29T18:21:48Z" | 0 | 0 | [
"size_categories:100K<n<1M",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T18:18:23Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: protected
dtype: int64
- name: label
dtype: string
splits:
- name: train
num_bytes: 10056323
num_examples: 30000
- name: augmented
num_bytes: 20573948
num_examples: 60000
- name: test
num_bytes: 5048669
num_examples: 15060
download_size: 4854258
dataset_size: 35678940
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: augmented
path: data/augmented-*
- split: test
path: data/test-*
---
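The YAML above declares three splits (`train`, `augmented`, `test`) with `text`, `protected`, and `label` columns. A minimal loading sketch with 🤗 `datasets`, assuming the repo id matches the dataset name above:
```python
from datasets import load_dataset

# Repo id taken from the dataset name above; adjust if it differs.
ds = load_dataset("karuna-bhaila/adult_income")

print(ds)                      # train / augmented / test splits
print(ds["train"][0]["text"])  # free-text record
print(ds["train"][0]["protected"], ds["train"][0]["label"])
```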
|
open-llm-leaderboard/lemon07r__Gemma-2-Ataraxy-v2a-9B-details | open-llm-leaderboard | "2024-09-29T18:22:48Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T18:19:31Z" | ---
pretty_name: Evaluation run of lemon07r/Gemma-2-Ataraxy-v2a-9B
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [lemon07r/Gemma-2-Ataraxy-v2a-9B](https://huggingface.co/lemon07r/Gemma-2-Ataraxy-v2a-9B)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run. The \"train\" split always points to the latest\
\ results.\n\nAn additional configuration \"results\" stores all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/lemon07r__Gemma-2-Ataraxy-v2a-9B-details\"\
,\n\tname=\"lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-09-29T18-19-31.104072](https://huggingface.co/datasets/open-llm-leaderboard/lemon07r__Gemma-2-Ataraxy-v2a-9B-details/blob/main/lemon07r__Gemma-2-Ataraxy-v2a-9B/results_2024-09-29T18-19-31.104072.json)\
\ (note that there might be results for other tasks in the repo if successive evals\
\ didn't cover the same tasks. You can find each one in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"inst_level_strict_acc,none\": 0.18585131894484413,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"inst_level_loose_acc,none\"\
: 0.19304556354916066,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\
,\n \"prompt_level_loose_acc,none\": 0.14048059149722736,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.014953371656822749,\n \"\
acc_norm,none\": 0.4699701647425088,\n \"acc_norm_stderr,none\": 0.0053819999195042914,\n\
\ \"prompt_level_strict_acc,none\": 0.133086876155268,\n \"\
prompt_level_strict_acc_stderr,none\": 0.014617009342904514,\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0,\n \"acc,none\"\
: 0.35147938829787234,\n \"acc_stderr,none\": 0.0043527222896365585,\n\
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.5170977260892207,\n \"acc_norm_stderr,none\"\
: 0.0062662351499772045,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"acc_norm,none\"\
: 0.824,\n \"acc_norm_stderr,none\": 0.02413349752545711,\n \
\ \"alias\": \" - leaderboard_bbh_boolean_expressions\"\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"acc_norm,none\": 0.5561497326203209,\n\
\ \"acc_norm_stderr,none\": 0.03642987131924726,\n \"alias\"\
: \" - leaderboard_bbh_causal_judgement\"\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"acc_norm,none\": 0.564,\n \"acc_norm_stderr,none\"\
: 0.03142556706028128,\n \"alias\": \" - leaderboard_bbh_date_understanding\"\
\n },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"acc_norm,none\"\
: 0.612,\n \"acc_norm_stderr,none\": 0.030881038748993922,\n \
\ \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\n },\n \"\
leaderboard_bbh_formal_fallacies\": {\n \"acc_norm,none\": 0.6,\n \
\ \"acc_norm_stderr,none\": 0.03104602102825324,\n \"alias\"\
: \" - leaderboard_bbh_formal_fallacies\"\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"acc_norm,none\": 0.368,\n \"acc_norm_stderr,none\"\
: 0.030562070620993167,\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
\n },\n \"leaderboard_bbh_hyperbaton\": {\n \"acc_norm,none\"\
: 0.54,\n \"acc_norm_stderr,none\": 0.031584653891499004,\n \
\ \"alias\": \" - leaderboard_bbh_hyperbaton\"\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"acc_norm,none\": 0.54,\n \"acc_norm_stderr,none\"\
: 0.031584653891499004,\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \
\ \"acc_norm,none\": 0.44,\n \"acc_norm_stderr,none\": 0.031457244522235715,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"acc_norm,none\": 0.804,\n \"acc_norm_stderr,none\": 0.02515685731325592,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
acc_norm,none\": 0.48,\n \"acc_norm_stderr,none\": 0.031660853408495185,\n\
\ \"alias\": \" - leaderboard_bbh_movie_recommendation\"\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"acc_norm,none\": 0.476,\n\
\ \"acc_norm_stderr,none\": 0.03164968895968782,\n \"alias\"\
: \" - leaderboard_bbh_navigate\"\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"acc_norm,none\": 0.244,\n \"acc_norm_stderr,none\"\
: 0.027217995464553182,\n \"alias\": \" - leaderboard_bbh_object_counting\"\
\n },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
acc_norm,none\": 0.4589041095890411,\n \"acc_norm_stderr,none\": 0.041382249050673066,\n\
\ \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\n },\n\
\ \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \"\
acc_norm,none\": 0.608,\n \"acc_norm_stderr,none\": 0.030938207620401195,\n\
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"acc_norm,none\"\
: 0.7,\n \"acc_norm_stderr,none\": 0.02904089347757585,\n \
\ \"alias\": \" - leaderboard_bbh_ruin_names\"\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"acc_norm,none\": 0.54,\n \"acc_norm_stderr,none\"\
: 0.03158465389149899,\n \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\
\n },\n \"leaderboard_bbh_snarks\": {\n \"acc_norm,none\"\
: 0.6629213483146067,\n \"acc_norm_stderr,none\": 0.03553120966481323,\n\
\ \"alias\": \" - leaderboard_bbh_snarks\"\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"acc_norm,none\": 0.656,\n \"acc_norm_stderr,none\"\
: 0.030104503392316385,\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
\n },\n \"leaderboard_bbh_temporal_sequences\": {\n \"\
acc_norm,none\": 0.448,\n \"acc_norm_stderr,none\": 0.03151438761115355,\n\
\ \"alias\": \" - leaderboard_bbh_temporal_sequences\"\n },\n\
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"acc_norm,none\": 0.292,\n \"acc_norm_stderr,none\": 0.02881432040220565,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"acc_norm,none\": 0.296,\n \"acc_norm_stderr,none\"\
: 0.028928939388379638,\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"acc_norm,none\": 0.24,\n \"acc_norm_stderr,none\"\
: 0.027065293652239007,\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
\n },\n \"leaderboard_bbh_web_of_lies\": {\n \"acc_norm,none\"\
: 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254709,\n \
\ \"alias\": \" - leaderboard_bbh_web_of_lies\"\n },\n \"leaderboard_gpqa\"\
: {\n \"acc_norm,none\": 0.33976510067114096,\n \"acc_norm_stderr,none\"\
: 0.013733321675524851,\n \"alias\": \" - leaderboard_gpqa\"\n \
\ },\n \"leaderboard_gpqa_diamond\": {\n \"acc_norm,none\": 0.3383838383838384,\n\
\ \"acc_norm_stderr,none\": 0.03371124142626303,\n \"alias\"\
: \" - leaderboard_gpqa_diamond\"\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"acc_norm,none\": 0.3315018315018315,\n \"acc_norm_stderr,none\"\
: 0.02016484210875768,\n \"alias\": \" - leaderboard_gpqa_extended\"\
\n },\n \"leaderboard_gpqa_main\": {\n \"acc_norm,none\"\
: 0.35044642857142855,\n \"acc_norm_stderr,none\": 0.02256651759785534,\n\
\ \"alias\": \" - leaderboard_gpqa_main\"\n },\n \"leaderboard_ifeval\"\
: {\n \"prompt_level_strict_acc,none\": 0.133086876155268,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.014617009342904514,\n \
\ \"inst_level_strict_acc,none\": 0.18585131894484413,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.14048059149722736,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.014953371656822749,\n \
\ \"inst_level_loose_acc,none\": 0.19304556354916066,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"alias\": \" - leaderboard_ifeval\"\n },\n \
\ \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.0,\n \
\ \"exact_match_stderr,none\": 0.0,\n \"alias\": \" - leaderboard_math_hard\"\
\n },\n \"leaderboard_math_algebra_hard\": {\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0,\n \"alias\": \"\
\ - leaderboard_math_algebra_hard\"\n },\n \"leaderboard_math_counting_and_prob_hard\"\
: {\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0,\n \"alias\": \" - leaderboard_math_counting_and_prob_hard\"\n\
\ },\n \"leaderboard_math_geometry_hard\": {\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0,\n \"alias\": \"\
\ - leaderboard_math_geometry_hard\"\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0,\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\"\
\n },\n \"leaderboard_math_num_theory_hard\": {\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0,\n \"alias\": \"\
\ - leaderboard_math_num_theory_hard\"\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0,\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\n \
\ },\n \"leaderboard_math_precalculus_hard\": {\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0,\n \"alias\": \"\
\ - leaderboard_math_precalculus_hard\"\n },\n \"leaderboard_mmlu_pro\"\
: {\n \"acc,none\": 0.35147938829787234,\n \"acc_stderr,none\"\
: 0.0043527222896365585,\n \"alias\": \" - leaderboard_mmlu_pro\"\n \
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.31613756613756616,\n\
\ \"acc_norm_stderr,none\": 0.016211743615540643,\n \"alias\"\
: \" - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\"\
: {\n \"acc_norm,none\": 0.5,\n \"acc_norm_stderr,none\":\
\ 0.031686212526223896,\n \"alias\": \" - leaderboard_musr_murder_mysteries\"\
\n },\n \"leaderboard_musr_object_placements\": {\n \"\
acc_norm,none\": 0.2734375,\n \"acc_norm_stderr,none\": 0.027912287939448926,\n\
\ \"alias\": \" - leaderboard_musr_object_placements\"\n },\n\
\ \"leaderboard_musr_team_allocation\": {\n \"acc_norm,none\"\
: 0.176,\n \"acc_norm_stderr,none\": 0.024133497525457126,\n \
\ \"alias\": \" - leaderboard_musr_team_allocation\"\n }\n },\n \
\ \"leaderboard\": {\n \"inst_level_strict_acc,none\": 0.18585131894484413,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"inst_level_loose_acc,none\"\
: 0.19304556354916066,\n \"inst_level_loose_acc_stderr,none\": \"N/A\",\n\
\ \"prompt_level_loose_acc,none\": 0.14048059149722736,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.014953371656822749,\n \"acc_norm,none\": 0.4699701647425088,\n \
\ \"acc_norm_stderr,none\": 0.0053819999195042914,\n \"prompt_level_strict_acc,none\"\
: 0.133086876155268,\n \"prompt_level_strict_acc_stderr,none\": 0.014617009342904514,\n\
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0,\n\
\ \"acc,none\": 0.35147938829787234,\n \"acc_stderr,none\": 0.0043527222896365585,\n\
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \
\ \"acc_norm,none\": 0.5170977260892207,\n \"acc_norm_stderr,none\": 0.0062662351499772045,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"acc_norm,none\": 0.824,\n \"acc_norm_stderr,none\": 0.02413349752545711,\n\
\ \"alias\": \" - leaderboard_bbh_boolean_expressions\"\n },\n \"\
leaderboard_bbh_causal_judgement\": {\n \"acc_norm,none\": 0.5561497326203209,\n\
\ \"acc_norm_stderr,none\": 0.03642987131924726,\n \"alias\": \" \
\ - leaderboard_bbh_causal_judgement\"\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"acc_norm,none\": 0.564,\n \"acc_norm_stderr,none\": 0.03142556706028128,\n\
\ \"alias\": \" - leaderboard_bbh_date_understanding\"\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"acc_norm,none\": 0.612,\n \"acc_norm_stderr,none\": 0.030881038748993922,\n\
\ \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\n },\n \"leaderboard_bbh_formal_fallacies\"\
: {\n \"acc_norm,none\": 0.6,\n \"acc_norm_stderr,none\": 0.03104602102825324,\n\
\ \"alias\": \" - leaderboard_bbh_formal_fallacies\"\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"acc_norm,none\": 0.368,\n \"acc_norm_stderr,none\": 0.030562070620993167,\n\
\ \"alias\": \" - leaderboard_bbh_geometric_shapes\"\n },\n \"leaderboard_bbh_hyperbaton\"\
: {\n \"acc_norm,none\": 0.54,\n \"acc_norm_stderr,none\": 0.031584653891499004,\n\
\ \"alias\": \" - leaderboard_bbh_hyperbaton\"\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"acc_norm,none\": 0.54,\n \"acc_norm_stderr,none\": 0.031584653891499004,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\n \
\ },\n \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"acc_norm,none\"\
: 0.44,\n \"acc_norm_stderr,none\": 0.031457244522235715,\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_seven_objects\"\n },\n \"leaderboard_bbh_logical_deduction_three_objects\"\
: {\n \"acc_norm,none\": 0.804,\n \"acc_norm_stderr,none\": 0.02515685731325592,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\n \
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"acc_norm,none\"\
: 0.48,\n \"acc_norm_stderr,none\": 0.031660853408495185,\n \"alias\"\
: \" - leaderboard_bbh_movie_recommendation\"\n },\n \"leaderboard_bbh_navigate\"\
: {\n \"acc_norm,none\": 0.476,\n \"acc_norm_stderr,none\": 0.03164968895968782,\n\
\ \"alias\": \" - leaderboard_bbh_navigate\"\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"acc_norm,none\": 0.244,\n \"acc_norm_stderr,none\": 0.027217995464553182,\n\
\ \"alias\": \" - leaderboard_bbh_object_counting\"\n },\n \"leaderboard_bbh_penguins_in_a_table\"\
: {\n \"acc_norm,none\": 0.4589041095890411,\n \"acc_norm_stderr,none\"\
: 0.041382249050673066,\n \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\
\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \"\
acc_norm,none\": 0.608,\n \"acc_norm_stderr,none\": 0.030938207620401195,\n\
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\n \
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"acc_norm,none\": 0.7,\n\
\ \"acc_norm_stderr,none\": 0.02904089347757585,\n \"alias\": \" \
\ - leaderboard_bbh_ruin_names\"\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"acc_norm,none\": 0.54,\n \"acc_norm_stderr,none\": 0.03158465389149899,\n\
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"acc_norm,none\": 0.6629213483146067,\n\
\ \"acc_norm_stderr,none\": 0.03553120966481323,\n \"alias\": \" \
\ - leaderboard_bbh_snarks\"\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"acc_norm,none\": 0.656,\n \"acc_norm_stderr,none\": 0.030104503392316385,\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\"\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"acc_norm,none\": 0.448,\n \
\ \"acc_norm_stderr,none\": 0.03151438761115355,\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"acc_norm,none\": 0.292,\n \"acc_norm_stderr,none\": 0.02881432040220565,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"acc_norm,none\": 0.296,\n \"acc_norm_stderr,none\": 0.028928939388379638,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"acc_norm,none\": 0.24,\n \"acc_norm_stderr,none\": 0.027065293652239007,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
\n },\n \"leaderboard_bbh_web_of_lies\": {\n \"acc_norm,none\": 0.488,\n\
\ \"acc_norm_stderr,none\": 0.03167708558254709,\n \"alias\": \" \
\ - leaderboard_bbh_web_of_lies\"\n },\n \"leaderboard_gpqa\": {\n \
\ \"acc_norm,none\": 0.33976510067114096,\n \"acc_norm_stderr,none\": 0.013733321675524851,\n\
\ \"alias\": \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\"\
: {\n \"acc_norm,none\": 0.3383838383838384,\n \"acc_norm_stderr,none\"\
: 0.03371124142626303,\n \"alias\": \" - leaderboard_gpqa_diamond\"\n \
\ },\n \"leaderboard_gpqa_extended\": {\n \"acc_norm,none\": 0.3315018315018315,\n\
\ \"acc_norm_stderr,none\": 0.02016484210875768,\n \"alias\": \" \
\ - leaderboard_gpqa_extended\"\n },\n \"leaderboard_gpqa_main\": {\n \
\ \"acc_norm,none\": 0.35044642857142855,\n \"acc_norm_stderr,none\":\
\ 0.02256651759785534,\n \"alias\": \" - leaderboard_gpqa_main\"\n },\n\
\ \"leaderboard_ifeval\": {\n \"prompt_level_strict_acc,none\": 0.133086876155268,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.014617009342904514,\n \
\ \"inst_level_strict_acc,none\": 0.18585131894484413,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.14048059149722736,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.014953371656822749,\n \"inst_level_loose_acc,none\"\
: 0.19304556354916066,\n \"inst_level_loose_acc_stderr,none\": \"N/A\",\n\
\ \"alias\": \" - leaderboard_ifeval\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0,\n\
\ \"alias\": \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\"\
: {\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0,\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\"\n },\n \"leaderboard_math_counting_and_prob_hard\"\
: {\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0,\n\
\ \"alias\": \" - leaderboard_math_counting_and_prob_hard\"\n },\n \
\ \"leaderboard_math_geometry_hard\": {\n \"exact_match,none\": 0.0,\n \
\ \"exact_match_stderr,none\": 0.0,\n \"alias\": \" - leaderboard_math_geometry_hard\"\
\n },\n \"leaderboard_math_intermediate_algebra_hard\": {\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0,\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\"\
\n },\n \"leaderboard_math_num_theory_hard\": {\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0,\n \"alias\": \" - leaderboard_math_num_theory_hard\"\
\n },\n \"leaderboard_math_prealgebra_hard\": {\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0,\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
\n },\n \"leaderboard_math_precalculus_hard\": {\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0,\n \"alias\": \" - leaderboard_math_precalculus_hard\"\
\n },\n \"leaderboard_mmlu_pro\": {\n \"acc,none\": 0.35147938829787234,\n\
\ \"acc_stderr,none\": 0.0043527222896365585,\n \"alias\": \" - leaderboard_mmlu_pro\"\
\n },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.31613756613756616,\n\
\ \"acc_norm_stderr,none\": 0.016211743615540643,\n \"alias\": \"\
\ - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\": {\n \
\ \"acc_norm,none\": 0.5,\n \"acc_norm_stderr,none\": 0.031686212526223896,\n\
\ \"alias\": \" - leaderboard_musr_murder_mysteries\"\n },\n \"leaderboard_musr_object_placements\"\
: {\n \"acc_norm,none\": 0.2734375,\n \"acc_norm_stderr,none\": 0.027912287939448926,\n\
\ \"alias\": \" - leaderboard_musr_object_placements\"\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"acc_norm,none\": 0.176,\n \"acc_norm_stderr,none\": 0.024133497525457126,\n\
\ \"alias\": \" - leaderboard_musr_team_allocation\"\n }\n}\n```"
repo_url: https://huggingface.co/lemon07r/Gemma-2-Ataraxy-v2a-9B
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_date_understanding
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_navigate
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_navigate_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_object_counting
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_object_counting_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_ruin_names
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_snarks
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_snarks_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_gpqa_diamond
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_gpqa_diamond_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_gpqa_extended
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_gpqa_extended_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_gpqa_main
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_gpqa_main_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_ifeval
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_ifeval_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_math_algebra_hard
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_math_algebra_hard_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_math_geometry_hard
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_math_geometry_hard_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_math_num_theory_hard
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_math_precalculus_hard
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_mmlu_pro
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_mmlu_pro_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_musr_object_placements
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_musr_object_placements_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-09-29T18-19-31.104072.jsonl'
- config_name: lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_musr_team_allocation
data_files:
- split: 2024_09_29T18_19_31.104072
path:
- '**/samples_leaderboard_musr_team_allocation_2024-09-29T18-19-31.104072.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-09-29T18-19-31.104072.jsonl'
---
# Dataset Card for Evaluation run of lemon07r/Gemma-2-Ataraxy-v2a-9B
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [lemon07r/Gemma-2-Ataraxy-v2a-9B](https://huggingface.co/lemon07r/Gemma-2-Ataraxy-v2a-9B)
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/lemon07r__Gemma-2-Ataraxy-v2a-9B-details",
name="lemon07r__Gemma-2-Ataraxy-v2a-9B__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
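Once loaded, the split behaves like any other 🤗 `datasets` split, so the per-sample records can be inspected directly. A minimal sketch, assuming `data` comes from the snippet above (the exact column names depend on the eval harness output):
```python
# Assumes `data` from the snippet above.
print(data.column_names)  # per-sample fields produced by the eval harness
print(data[0])            # first evaluated sample of the latest run
```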
## Latest results
These are the [latest results from run 2024-09-29T18-19-31.104072](https://huggingface.co/datasets/open-llm-leaderboard/lemon07r__Gemma-2-Ataraxy-v2a-9B-details/blob/main/lemon07r__Gemma-2-Ataraxy-v2a-9B/results_2024-09-29T18-19-31.104072.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval):
```python
{
"all": {
"leaderboard": {
"inst_level_strict_acc,none": 0.18585131894484413,
"inst_level_strict_acc_stderr,none": "N/A",
"inst_level_loose_acc,none": 0.19304556354916066,
"inst_level_loose_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.14048059149722736,
"prompt_level_loose_acc_stderr,none": 0.014953371656822749,
"acc_norm,none": 0.4699701647425088,
"acc_norm_stderr,none": 0.0053819999195042914,
"prompt_level_strict_acc,none": 0.133086876155268,
"prompt_level_strict_acc_stderr,none": 0.014617009342904514,
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"acc,none": 0.35147938829787234,
"acc_stderr,none": 0.0043527222896365585,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5170977260892207,
"acc_norm_stderr,none": 0.0062662351499772045,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"acc_norm,none": 0.824,
"acc_norm_stderr,none": 0.02413349752545711,
"alias": " - leaderboard_bbh_boolean_expressions"
},
"leaderboard_bbh_causal_judgement": {
"acc_norm,none": 0.5561497326203209,
"acc_norm_stderr,none": 0.03642987131924726,
"alias": " - leaderboard_bbh_causal_judgement"
},
"leaderboard_bbh_date_understanding": {
"acc_norm,none": 0.564,
"acc_norm_stderr,none": 0.03142556706028128,
"alias": " - leaderboard_bbh_date_understanding"
},
"leaderboard_bbh_disambiguation_qa": {
"acc_norm,none": 0.612,
"acc_norm_stderr,none": 0.030881038748993922,
"alias": " - leaderboard_bbh_disambiguation_qa"
},
"leaderboard_bbh_formal_fallacies": {
"acc_norm,none": 0.6,
"acc_norm_stderr,none": 0.03104602102825324,
"alias": " - leaderboard_bbh_formal_fallacies"
},
"leaderboard_bbh_geometric_shapes": {
"acc_norm,none": 0.368,
"acc_norm_stderr,none": 0.030562070620993167,
"alias": " - leaderboard_bbh_geometric_shapes"
},
"leaderboard_bbh_hyperbaton": {
"acc_norm,none": 0.54,
"acc_norm_stderr,none": 0.031584653891499004,
"alias": " - leaderboard_bbh_hyperbaton"
},
"leaderboard_bbh_logical_deduction_five_objects": {
"acc_norm,none": 0.54,
"acc_norm_stderr,none": 0.031584653891499004,
"alias": " - leaderboard_bbh_logical_deduction_five_objects"
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"acc_norm,none": 0.44,
"acc_norm_stderr,none": 0.031457244522235715,
"alias": " - leaderboard_bbh_logical_deduction_seven_objects"
},
"leaderboard_bbh_logical_deduction_three_objects": {
"acc_norm,none": 0.804,
"acc_norm_stderr,none": 0.02515685731325592,
"alias": " - leaderboard_bbh_logical_deduction_three_objects"
},
"leaderboard_bbh_movie_recommendation": {
"acc_norm,none": 0.48,
"acc_norm_stderr,none": 0.031660853408495185,
"alias": " - leaderboard_bbh_movie_recommendation"
},
"leaderboard_bbh_navigate": {
"acc_norm,none": 0.476,
"acc_norm_stderr,none": 0.03164968895968782,
"alias": " - leaderboard_bbh_navigate"
},
"leaderboard_bbh_object_counting": {
"acc_norm,none": 0.244,
"acc_norm_stderr,none": 0.027217995464553182,
"alias": " - leaderboard_bbh_object_counting"
},
"leaderboard_bbh_penguins_in_a_table": {
"acc_norm,none": 0.4589041095890411,
"acc_norm_stderr,none": 0.041382249050673066,
"alias": " - leaderboard_bbh_penguins_in_a_table"
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"acc_norm,none": 0.608,
"acc_norm_stderr,none": 0.030938207620401195,
"alias": " - leaderboard_bbh_reasoning_about_colored_objects"
},
"leaderboard_bbh_ruin_names": {
"acc_norm,none": 0.7,
"acc_norm_stderr,none": 0.02904089347757585,
"alias": " - leaderboard_bbh_ruin_names"
},
"leaderboard_bbh_salient_translation_error_detection": {
"acc_norm,none": 0.54,
"acc_norm_stderr,none": 0.03158465389149899,
"alias": " - leaderboard_bbh_salient_translation_error_detection"
},
"leaderboard_bbh_snarks": {
"acc_norm,none": 0.6629213483146067,
"acc_norm_stderr,none": 0.03553120966481323,
"alias": " - leaderboard_bbh_snarks"
},
"leaderboard_bbh_sports_understanding": {
"acc_norm,none": 0.656,
"acc_norm_stderr,none": 0.030104503392316385,
"alias": " - leaderboard_bbh_sports_understanding"
},
"leaderboard_bbh_temporal_sequences": {
"acc_norm,none": 0.448,
"acc_norm_stderr,none": 0.03151438761115355,
"alias": " - leaderboard_bbh_temporal_sequences"
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"acc_norm,none": 0.292,
"acc_norm_stderr,none": 0.02881432040220565,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"acc_norm,none": 0.296,
"acc_norm_stderr,none": 0.028928939388379638,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"acc_norm,none": 0.24,
"acc_norm_stderr,none": 0.027065293652239007,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects"
},
"leaderboard_bbh_web_of_lies": {
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254709,
"alias": " - leaderboard_bbh_web_of_lies"
},
"leaderboard_gpqa": {
"acc_norm,none": 0.33976510067114096,
"acc_norm_stderr,none": 0.013733321675524851,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"acc_norm,none": 0.3383838383838384,
"acc_norm_stderr,none": 0.03371124142626303,
"alias": " - leaderboard_gpqa_diamond"
},
"leaderboard_gpqa_extended": {
"acc_norm,none": 0.3315018315018315,
"acc_norm_stderr,none": 0.02016484210875768,
"alias": " - leaderboard_gpqa_extended"
},
"leaderboard_gpqa_main": {
"acc_norm,none": 0.35044642857142855,
"acc_norm_stderr,none": 0.02256651759785534,
"alias": " - leaderboard_gpqa_main"
},
"leaderboard_ifeval": {
"prompt_level_strict_acc,none": 0.133086876155268,
"prompt_level_strict_acc_stderr,none": 0.014617009342904514,
"inst_level_strict_acc,none": 0.18585131894484413,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.14048059149722736,
"prompt_level_loose_acc_stderr,none": 0.014953371656822749,
"inst_level_loose_acc,none": 0.19304556354916066,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": " - leaderboard_ifeval"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_algebra_hard"
},
"leaderboard_math_counting_and_prob_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_counting_and_prob_hard"
},
"leaderboard_math_geometry_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_geometry_hard"
},
"leaderboard_math_intermediate_algebra_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_intermediate_algebra_hard"
},
"leaderboard_math_num_theory_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_num_theory_hard"
},
"leaderboard_math_prealgebra_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_prealgebra_hard"
},
"leaderboard_math_precalculus_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_precalculus_hard"
},
"leaderboard_mmlu_pro": {
"acc,none": 0.35147938829787234,
"acc_stderr,none": 0.0043527222896365585,
"alias": " - leaderboard_mmlu_pro"
},
"leaderboard_musr": {
"acc_norm,none": 0.31613756613756616,
"acc_norm_stderr,none": 0.016211743615540643,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"acc_norm,none": 0.5,
"acc_norm_stderr,none": 0.031686212526223896,
"alias": " - leaderboard_musr_murder_mysteries"
},
"leaderboard_musr_object_placements": {
"acc_norm,none": 0.2734375,
"acc_norm_stderr,none": 0.027912287939448926,
"alias": " - leaderboard_musr_object_placements"
},
"leaderboard_musr_team_allocation": {
"acc_norm,none": 0.176,
"acc_norm_stderr,none": 0.024133497525457126,
"alias": " - leaderboard_musr_team_allocation"
}
},
"leaderboard": {
"inst_level_strict_acc,none": 0.18585131894484413,
"inst_level_strict_acc_stderr,none": "N/A",
"inst_level_loose_acc,none": 0.19304556354916066,
"inst_level_loose_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.14048059149722736,
"prompt_level_loose_acc_stderr,none": 0.014953371656822749,
"acc_norm,none": 0.4699701647425088,
"acc_norm_stderr,none": 0.0053819999195042914,
"prompt_level_strict_acc,none": 0.133086876155268,
"prompt_level_strict_acc_stderr,none": 0.014617009342904514,
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"acc,none": 0.35147938829787234,
"acc_stderr,none": 0.0043527222896365585,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.5170977260892207,
"acc_norm_stderr,none": 0.0062662351499772045,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"acc_norm,none": 0.824,
"acc_norm_stderr,none": 0.02413349752545711,
"alias": " - leaderboard_bbh_boolean_expressions"
},
"leaderboard_bbh_causal_judgement": {
"acc_norm,none": 0.5561497326203209,
"acc_norm_stderr,none": 0.03642987131924726,
"alias": " - leaderboard_bbh_causal_judgement"
},
"leaderboard_bbh_date_understanding": {
"acc_norm,none": 0.564,
"acc_norm_stderr,none": 0.03142556706028128,
"alias": " - leaderboard_bbh_date_understanding"
},
"leaderboard_bbh_disambiguation_qa": {
"acc_norm,none": 0.612,
"acc_norm_stderr,none": 0.030881038748993922,
"alias": " - leaderboard_bbh_disambiguation_qa"
},
"leaderboard_bbh_formal_fallacies": {
"acc_norm,none": 0.6,
"acc_norm_stderr,none": 0.03104602102825324,
"alias": " - leaderboard_bbh_formal_fallacies"
},
"leaderboard_bbh_geometric_shapes": {
"acc_norm,none": 0.368,
"acc_norm_stderr,none": 0.030562070620993167,
"alias": " - leaderboard_bbh_geometric_shapes"
},
"leaderboard_bbh_hyperbaton": {
"acc_norm,none": 0.54,
"acc_norm_stderr,none": 0.031584653891499004,
"alias": " - leaderboard_bbh_hyperbaton"
},
"leaderboard_bbh_logical_deduction_five_objects": {
"acc_norm,none": 0.54,
"acc_norm_stderr,none": 0.031584653891499004,
"alias": " - leaderboard_bbh_logical_deduction_five_objects"
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"acc_norm,none": 0.44,
"acc_norm_stderr,none": 0.031457244522235715,
"alias": " - leaderboard_bbh_logical_deduction_seven_objects"
},
"leaderboard_bbh_logical_deduction_three_objects": {
"acc_norm,none": 0.804,
"acc_norm_stderr,none": 0.02515685731325592,
"alias": " - leaderboard_bbh_logical_deduction_three_objects"
},
"leaderboard_bbh_movie_recommendation": {
"acc_norm,none": 0.48,
"acc_norm_stderr,none": 0.031660853408495185,
"alias": " - leaderboard_bbh_movie_recommendation"
},
"leaderboard_bbh_navigate": {
"acc_norm,none": 0.476,
"acc_norm_stderr,none": 0.03164968895968782,
"alias": " - leaderboard_bbh_navigate"
},
"leaderboard_bbh_object_counting": {
"acc_norm,none": 0.244,
"acc_norm_stderr,none": 0.027217995464553182,
"alias": " - leaderboard_bbh_object_counting"
},
"leaderboard_bbh_penguins_in_a_table": {
"acc_norm,none": 0.4589041095890411,
"acc_norm_stderr,none": 0.041382249050673066,
"alias": " - leaderboard_bbh_penguins_in_a_table"
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"acc_norm,none": 0.608,
"acc_norm_stderr,none": 0.030938207620401195,
"alias": " - leaderboard_bbh_reasoning_about_colored_objects"
},
"leaderboard_bbh_ruin_names": {
"acc_norm,none": 0.7,
"acc_norm_stderr,none": 0.02904089347757585,
"alias": " - leaderboard_bbh_ruin_names"
},
"leaderboard_bbh_salient_translation_error_detection": {
"acc_norm,none": 0.54,
"acc_norm_stderr,none": 0.03158465389149899,
"alias": " - leaderboard_bbh_salient_translation_error_detection"
},
"leaderboard_bbh_snarks": {
"acc_norm,none": 0.6629213483146067,
"acc_norm_stderr,none": 0.03553120966481323,
"alias": " - leaderboard_bbh_snarks"
},
"leaderboard_bbh_sports_understanding": {
"acc_norm,none": 0.656,
"acc_norm_stderr,none": 0.030104503392316385,
"alias": " - leaderboard_bbh_sports_understanding"
},
"leaderboard_bbh_temporal_sequences": {
"acc_norm,none": 0.448,
"acc_norm_stderr,none": 0.03151438761115355,
"alias": " - leaderboard_bbh_temporal_sequences"
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"acc_norm,none": 0.292,
"acc_norm_stderr,none": 0.02881432040220565,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"acc_norm,none": 0.296,
"acc_norm_stderr,none": 0.028928939388379638,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"acc_norm,none": 0.24,
"acc_norm_stderr,none": 0.027065293652239007,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects"
},
"leaderboard_bbh_web_of_lies": {
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254709,
"alias": " - leaderboard_bbh_web_of_lies"
},
"leaderboard_gpqa": {
"acc_norm,none": 0.33976510067114096,
"acc_norm_stderr,none": 0.013733321675524851,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"acc_norm,none": 0.3383838383838384,
"acc_norm_stderr,none": 0.03371124142626303,
"alias": " - leaderboard_gpqa_diamond"
},
"leaderboard_gpqa_extended": {
"acc_norm,none": 0.3315018315018315,
"acc_norm_stderr,none": 0.02016484210875768,
"alias": " - leaderboard_gpqa_extended"
},
"leaderboard_gpqa_main": {
"acc_norm,none": 0.35044642857142855,
"acc_norm_stderr,none": 0.02256651759785534,
"alias": " - leaderboard_gpqa_main"
},
"leaderboard_ifeval": {
"prompt_level_strict_acc,none": 0.133086876155268,
"prompt_level_strict_acc_stderr,none": 0.014617009342904514,
"inst_level_strict_acc,none": 0.18585131894484413,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.14048059149722736,
"prompt_level_loose_acc_stderr,none": 0.014953371656822749,
"inst_level_loose_acc,none": 0.19304556354916066,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": " - leaderboard_ifeval"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_algebra_hard"
},
"leaderboard_math_counting_and_prob_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_counting_and_prob_hard"
},
"leaderboard_math_geometry_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_geometry_hard"
},
"leaderboard_math_intermediate_algebra_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_intermediate_algebra_hard"
},
"leaderboard_math_num_theory_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_num_theory_hard"
},
"leaderboard_math_prealgebra_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_prealgebra_hard"
},
"leaderboard_math_precalculus_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_precalculus_hard"
},
"leaderboard_mmlu_pro": {
"acc,none": 0.35147938829787234,
"acc_stderr,none": 0.0043527222896365585,
"alias": " - leaderboard_mmlu_pro"
},
"leaderboard_musr": {
"acc_norm,none": 0.31613756613756616,
"acc_norm_stderr,none": 0.016211743615540643,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"acc_norm,none": 0.5,
"acc_norm_stderr,none": 0.031686212526223896,
"alias": " - leaderboard_musr_murder_mysteries"
},
"leaderboard_musr_object_placements": {
"acc_norm,none": 0.2734375,
"acc_norm_stderr,none": 0.027912287939448926,
"alias": " - leaderboard_musr_object_placements"
},
"leaderboard_musr_team_allocation": {
"acc_norm,none": 0.176,
"acc_norm_stderr,none": 0.024133497525457126,
"alias": " - leaderboard_musr_team_allocation"
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
alabulei/aelf-test | alabulei | "2024-09-29T18:24:18Z" | 0 | 0 | [
"license:apache-2.0",
"region:us"
] | null | "2024-09-29T18:23:06Z" | ---
license: apache-2.0
---
|
DaniilOr/copy | DaniilOr | "2024-09-29T20:09:43Z" | 0 | 0 | [
"license:mit",
"size_categories:1M<n<10M",
"format:csv",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T18:25:01Z" | ---
license: mit
---
|
open-llm-leaderboard/Cran-May__T.E-8.1-details | open-llm-leaderboard | "2024-09-29T18:29:30Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T18:25:59Z" | ---
pretty_name: Evaluation run of Cran-May/T.E-8.1
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [Cran-May/T.E-8.1](https://huggingface.co/Cran-May/T.E-8.1)\nThe dataset is composed\
\ of 38 configuration(s), each one corresponding to one of the evaluated task.\n\
\nThe dataset has been created from 1 run(s). Each run can be found as a specific\
\ split in each configuration, the split being named using the timestamp of the\
\ run.The \"train\" split is always pointing to the latest results.\n\nAn additional\
\ configuration \"results\" store all the aggregated results of the run.\n\nTo load\
\ the details from a run, you can for instance do the following:\n```python\nfrom\
\ datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/Cran-May__T.E-8.1-details\"\
,\n\tname=\"Cran-May__T.E-8.1__leaderboard_bbh_boolean_expressions\",\n\tsplit=\"\
latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results from run\
\ 2024-09-29T18-25-58.538578](https://huggingface.co/datasets/open-llm-leaderboard/Cran-May__T.E-8.1-details/blob/main/Cran-May__T.E-8.1/results_2024-09-29T18-25-58.538578.json)\
  \ (note that there might be results for other tasks in the repo if successive evals\
  \ didn't cover the same tasks; you can find each in the results and in the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"inst_level_strict_acc,none\": 0.7517985611510791,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_strict_acc,none\"\
: 0.6635859519408502,\n \"prompt_level_strict_acc_stderr,none\": 0.02033240600470129,\n\
\ \"acc,none\": 0.4432347074468085,\n \"acc_stderr,none\"\
: 0.004528997637022319,\n \"acc_norm,none\": 0.5078479699053055,\n \
\ \"acc_norm_stderr,none\": 0.005366506638519437,\n \"prompt_level_loose_acc,none\"\
: 0.6746765249537893,\n \"prompt_level_loose_acc_stderr,none\": 0.020160839912603132,\n\
\ \"inst_level_loose_acc,none\": 0.7613908872901679,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\",\n \"exact_match,none\":\
\ 0.0634441087613293,\n \"exact_match_stderr,none\": 0.00659150584844867,\n\
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.555806283631314,\n \"acc_norm_stderr,none\"\
: 0.006190036548895745,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"acc_norm,none\"\
: 0.876,\n \"acc_norm_stderr,none\": 0.02088638225867326,\n \
\ \"alias\": \" - leaderboard_bbh_boolean_expressions\"\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"acc_norm,none\": 0.5561497326203209,\n\
\ \"acc_norm_stderr,none\": 0.03642987131924727,\n \"alias\"\
: \" - leaderboard_bbh_causal_judgement\"\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"acc_norm,none\": 0.584,\n \"acc_norm_stderr,none\"\
: 0.031235856237014553,\n \"alias\": \" - leaderboard_bbh_date_understanding\"\
\n },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"acc_norm,none\"\
: 0.628,\n \"acc_norm_stderr,none\": 0.030630325944558317,\n \
\ \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\n },\n \"\
leaderboard_bbh_formal_fallacies\": {\n \"acc_norm,none\": 0.608,\n \
\ \"acc_norm_stderr,none\": 0.030938207620401195,\n \"alias\"\
: \" - leaderboard_bbh_formal_fallacies\"\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\"\
: 0.03166998503010742,\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
\n },\n \"leaderboard_bbh_hyperbaton\": {\n \"acc_norm,none\"\
: 0.552,\n \"acc_norm_stderr,none\": 0.031514387611153515,\n \
\ \"alias\": \" - leaderboard_bbh_hyperbaton\"\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"acc_norm,none\": 0.564,\n \"acc_norm_stderr,none\"\
: 0.03142556706028128,\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \
\ \"acc_norm,none\": 0.48,\n \"acc_norm_stderr,none\": 0.031660853408495185,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"acc_norm,none\": 0.784,\n \"acc_norm_stderr,none\": 0.02607865766373272,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
acc_norm,none\": 0.644,\n \"acc_norm_stderr,none\": 0.03034368065715321,\n\
\ \"alias\": \" - leaderboard_bbh_movie_recommendation\"\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"acc_norm,none\": 0.7,\n\
\ \"acc_norm_stderr,none\": 0.02904089347757586,\n \"alias\"\
: \" - leaderboard_bbh_navigate\"\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"acc_norm,none\": 0.372,\n \"acc_norm_stderr,none\"\
: 0.030630325944558313,\n \"alias\": \" - leaderboard_bbh_object_counting\"\
\n },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
acc_norm,none\": 0.5753424657534246,\n \"acc_norm_stderr,none\": 0.04104862657656194,\n\
\ \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\n },\n\
\ \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \"\
acc_norm,none\": 0.64,\n \"acc_norm_stderr,none\": 0.030418764025174988,\n\
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"acc_norm,none\"\
: 0.596,\n \"acc_norm_stderr,none\": 0.031096688184825295,\n \
\ \"alias\": \" - leaderboard_bbh_ruin_names\"\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"acc_norm,none\": 0.528,\n \"acc_norm_stderr,none\"\
: 0.0316364895315444,\n \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\
\n },\n \"leaderboard_bbh_snarks\": {\n \"acc_norm,none\"\
: 0.7247191011235955,\n \"acc_norm_stderr,none\": 0.03357269922538226,\n\
\ \"alias\": \" - leaderboard_bbh_snarks\"\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"acc_norm,none\": 0.724,\n \"acc_norm_stderr,none\"\
: 0.028328537274211363,\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
\n },\n \"leaderboard_bbh_temporal_sequences\": {\n \"\
acc_norm,none\": 0.5,\n \"acc_norm_stderr,none\": 0.031686212526223896,\n\
\ \"alias\": \" - leaderboard_bbh_temporal_sequences\"\n },\n\
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"acc_norm,none\": 0.232,\n \"acc_norm_stderr,none\": 0.026750070374865167,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"acc_norm,none\": 0.168,\n \"acc_norm_stderr,none\"\
: 0.0236928132054926,\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"acc_norm,none\": 0.284,\n \"acc_norm_stderr,none\"\
: 0.02857695873043741,\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
\n },\n \"leaderboard_bbh_web_of_lies\": {\n \"acc_norm,none\"\
: 0.56,\n \"acc_norm_stderr,none\": 0.031457244522235646,\n \
\ \"alias\": \" - leaderboard_bbh_web_of_lies\"\n },\n \"leaderboard_gpqa\"\
: {\n \"acc_norm,none\": 0.31291946308724833,\n \"acc_norm_stderr,none\"\
: 0.013437044976303247,\n \"alias\": \" - leaderboard_gpqa\"\n \
\ },\n \"leaderboard_gpqa_diamond\": {\n \"acc_norm,none\": 0.2727272727272727,\n\
\ \"acc_norm_stderr,none\": 0.03173071239071724,\n \"alias\"\
: \" - leaderboard_gpqa_diamond\"\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"acc_norm,none\": 0.32051282051282054,\n \"acc_norm_stderr,none\"\
: 0.019990105460697117,\n \"alias\": \" - leaderboard_gpqa_extended\"\
\n },\n \"leaderboard_gpqa_main\": {\n \"acc_norm,none\"\
: 0.32142857142857145,\n \"acc_norm_stderr,none\": 0.022089519157170164,\n\
\ \"alias\": \" - leaderboard_gpqa_main\"\n },\n \"leaderboard_ifeval\"\
: {\n \"prompt_level_strict_acc,none\": 0.6635859519408502,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.02033240600470129,\n \
\ \"inst_level_strict_acc,none\": 0.7517985611510791,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.6746765249537893,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.020160839912603132,\n \
\ \"inst_level_loose_acc,none\": 0.7613908872901679,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"alias\": \" - leaderboard_ifeval\"\n },\n \
\ \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.0634441087613293,\n\
\ \"exact_match_stderr,none\": 0.00659150584844867,\n \"alias\"\
: \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\"\
: {\n \"exact_match,none\": 0.12052117263843648,\n \"exact_match_stderr,none\"\
: 0.0186115976549282,\n \"alias\": \" - leaderboard_math_algebra_hard\"\
\n },\n \"leaderboard_math_counting_and_prob_hard\": {\n \
\ \"exact_match,none\": 0.016260162601626018,\n \"exact_match_stderr,none\"\
: 0.011450452676925661,\n \"alias\": \" - leaderboard_math_counting_and_prob_hard\"\
\n },\n \"leaderboard_math_geometry_hard\": {\n \"exact_match,none\"\
: 0.030303030303030304,\n \"exact_match_stderr,none\": 0.014977019714308249,\n\
\ \"alias\": \" - leaderboard_math_geometry_hard\"\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"exact_match,none\"\
: 0.014285714285714285,\n \"exact_match_stderr,none\": 0.007104350893915322,\n\
\ \"alias\": \" - leaderboard_math_intermediate_algebra_hard\"\n \
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"exact_match,none\"\
: 0.045454545454545456,\n \"exact_match_stderr,none\": 0.016839967582612473,\n\
\ \"alias\": \" - leaderboard_math_num_theory_hard\"\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"exact_match,none\"\
: 0.12435233160621761,\n \"exact_match_stderr,none\": 0.02381447708659356,\n\
\ \"alias\": \" - leaderboard_math_prealgebra_hard\"\n },\n \
\ \"leaderboard_math_precalculus_hard\": {\n \"exact_match,none\"\
: 0.044444444444444446,\n \"exact_match_stderr,none\": 0.01780263602032456,\n\
\ \"alias\": \" - leaderboard_math_precalculus_hard\"\n },\n\
\ \"leaderboard_mmlu_pro\": {\n \"acc,none\": 0.4432347074468085,\n\
\ \"acc_stderr,none\": 0.004528997637022319,\n \"alias\":\
\ \" - leaderboard_mmlu_pro\"\n },\n \"leaderboard_musr\": {\n \
\ \"acc_norm,none\": 0.4497354497354497,\n \"acc_norm_stderr,none\"\
: 0.017907859743332224,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"acc_norm,none\"\
: 0.54,\n \"acc_norm_stderr,none\": 0.031584653891499004,\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\"\n },\n \"\
leaderboard_musr_object_placements\": {\n \"acc_norm,none\": 0.3515625,\n\
\ \"acc_norm_stderr,none\": 0.029899590697818237,\n \"alias\"\
: \" - leaderboard_musr_object_placements\"\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"acc_norm,none\": 0.46,\n \"acc_norm_stderr,none\"\
: 0.031584653891499,\n \"alias\": \" - leaderboard_musr_team_allocation\"\
\n }\n },\n \"leaderboard\": {\n \"inst_level_strict_acc,none\"\
: 0.7517985611510791,\n \"inst_level_strict_acc_stderr,none\": \"N/A\",\n\
\ \"prompt_level_strict_acc,none\": 0.6635859519408502,\n \"prompt_level_strict_acc_stderr,none\"\
: 0.02033240600470129,\n \"acc,none\": 0.4432347074468085,\n \"acc_stderr,none\"\
: 0.004528997637022319,\n \"acc_norm,none\": 0.5078479699053055,\n \
\ \"acc_norm_stderr,none\": 0.005366506638519437,\n \"prompt_level_loose_acc,none\"\
: 0.6746765249537893,\n \"prompt_level_loose_acc_stderr,none\": 0.020160839912603132,\n\
\ \"inst_level_loose_acc,none\": 0.7613908872901679,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"exact_match,none\": 0.0634441087613293,\n \"exact_match_stderr,none\"\
: 0.00659150584844867,\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.555806283631314,\n \"acc_norm_stderr,none\"\
: 0.006190036548895745,\n \"alias\": \" - leaderboard_bbh\"\n },\n \
\ \"leaderboard_bbh_boolean_expressions\": {\n \"acc_norm,none\": 0.876,\n\
\ \"acc_norm_stderr,none\": 0.02088638225867326,\n \"alias\": \" \
\ - leaderboard_bbh_boolean_expressions\"\n },\n \"leaderboard_bbh_causal_judgement\"\
: {\n \"acc_norm,none\": 0.5561497326203209,\n \"acc_norm_stderr,none\"\
: 0.03642987131924727,\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
\n },\n \"leaderboard_bbh_date_understanding\": {\n \"acc_norm,none\"\
: 0.584,\n \"acc_norm_stderr,none\": 0.031235856237014553,\n \"alias\"\
: \" - leaderboard_bbh_date_understanding\"\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"acc_norm,none\": 0.628,\n \"acc_norm_stderr,none\": 0.030630325944558317,\n\
\ \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\n },\n \"leaderboard_bbh_formal_fallacies\"\
: {\n \"acc_norm,none\": 0.608,\n \"acc_norm_stderr,none\": 0.030938207620401195,\n\
\ \"alias\": \" - leaderboard_bbh_formal_fallacies\"\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010742,\n\
\ \"alias\": \" - leaderboard_bbh_geometric_shapes\"\n },\n \"leaderboard_bbh_hyperbaton\"\
: {\n \"acc_norm,none\": 0.552,\n \"acc_norm_stderr,none\": 0.031514387611153515,\n\
\ \"alias\": \" - leaderboard_bbh_hyperbaton\"\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"acc_norm,none\": 0.564,\n \"acc_norm_stderr,none\": 0.03142556706028128,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\n \
\ },\n \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"acc_norm,none\"\
: 0.48,\n \"acc_norm_stderr,none\": 0.031660853408495185,\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_seven_objects\"\n },\n \"leaderboard_bbh_logical_deduction_three_objects\"\
: {\n \"acc_norm,none\": 0.784,\n \"acc_norm_stderr,none\": 0.02607865766373272,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\n \
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"acc_norm,none\"\
: 0.644,\n \"acc_norm_stderr,none\": 0.03034368065715321,\n \"alias\"\
: \" - leaderboard_bbh_movie_recommendation\"\n },\n \"leaderboard_bbh_navigate\"\
: {\n \"acc_norm,none\": 0.7,\n \"acc_norm_stderr,none\": 0.02904089347757586,\n\
\ \"alias\": \" - leaderboard_bbh_navigate\"\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"acc_norm,none\": 0.372,\n \"acc_norm_stderr,none\": 0.030630325944558313,\n\
\ \"alias\": \" - leaderboard_bbh_object_counting\"\n },\n \"leaderboard_bbh_penguins_in_a_table\"\
: {\n \"acc_norm,none\": 0.5753424657534246,\n \"acc_norm_stderr,none\"\
: 0.04104862657656194,\n \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\
\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \"\
acc_norm,none\": 0.64,\n \"acc_norm_stderr,none\": 0.030418764025174988,\n\
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\n \
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"acc_norm,none\": 0.596,\n\
\ \"acc_norm_stderr,none\": 0.031096688184825295,\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\"\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"acc_norm,none\": 0.528,\n \"acc_norm_stderr,none\": 0.0316364895315444,\n\
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"acc_norm,none\": 0.7247191011235955,\n\
\ \"acc_norm_stderr,none\": 0.03357269922538226,\n \"alias\": \" \
\ - leaderboard_bbh_snarks\"\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"acc_norm,none\": 0.724,\n \"acc_norm_stderr,none\": 0.028328537274211363,\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\"\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"acc_norm,none\": 0.5,\n \
\ \"acc_norm_stderr,none\": 0.031686212526223896,\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"acc_norm,none\": 0.232,\n \"acc_norm_stderr,none\": 0.026750070374865167,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"acc_norm,none\": 0.168,\n \"acc_norm_stderr,none\": 0.0236928132054926,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"acc_norm,none\": 0.284,\n \"acc_norm_stderr,none\": 0.02857695873043741,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
\n },\n \"leaderboard_bbh_web_of_lies\": {\n \"acc_norm,none\": 0.56,\n\
\ \"acc_norm_stderr,none\": 0.031457244522235646,\n \"alias\": \"\
\ - leaderboard_bbh_web_of_lies\"\n },\n \"leaderboard_gpqa\": {\n \
\ \"acc_norm,none\": 0.31291946308724833,\n \"acc_norm_stderr,none\": 0.013437044976303247,\n\
\ \"alias\": \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\"\
: {\n \"acc_norm,none\": 0.2727272727272727,\n \"acc_norm_stderr,none\"\
: 0.03173071239071724,\n \"alias\": \" - leaderboard_gpqa_diamond\"\n \
\ },\n \"leaderboard_gpqa_extended\": {\n \"acc_norm,none\": 0.32051282051282054,\n\
\ \"acc_norm_stderr,none\": 0.019990105460697117,\n \"alias\": \"\
\ - leaderboard_gpqa_extended\"\n },\n \"leaderboard_gpqa_main\": {\n \
\ \"acc_norm,none\": 0.32142857142857145,\n \"acc_norm_stderr,none\"\
: 0.022089519157170164,\n \"alias\": \" - leaderboard_gpqa_main\"\n },\n\
\ \"leaderboard_ifeval\": {\n \"prompt_level_strict_acc,none\": 0.6635859519408502,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.02033240600470129,\n \
\ \"inst_level_strict_acc,none\": 0.7517985611510791,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.6746765249537893,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.020160839912603132,\n \"inst_level_loose_acc,none\"\
: 0.7613908872901679,\n \"inst_level_loose_acc_stderr,none\": \"N/A\",\n\
\ \"alias\": \" - leaderboard_ifeval\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.0634441087613293,\n \"exact_match_stderr,none\"\
: 0.00659150584844867,\n \"alias\": \" - leaderboard_math_hard\"\n },\n\
\ \"leaderboard_math_algebra_hard\": {\n \"exact_match,none\": 0.12052117263843648,\n\
\ \"exact_match_stderr,none\": 0.0186115976549282,\n \"alias\": \"\
\ - leaderboard_math_algebra_hard\"\n },\n \"leaderboard_math_counting_and_prob_hard\"\
: {\n \"exact_match,none\": 0.016260162601626018,\n \"exact_match_stderr,none\"\
: 0.011450452676925661,\n \"alias\": \" - leaderboard_math_counting_and_prob_hard\"\
\n },\n \"leaderboard_math_geometry_hard\": {\n \"exact_match,none\"\
: 0.030303030303030304,\n \"exact_match_stderr,none\": 0.014977019714308249,\n\
\ \"alias\": \" - leaderboard_math_geometry_hard\"\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"exact_match,none\": 0.014285714285714285,\n \"exact_match_stderr,none\"\
: 0.007104350893915322,\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\"\
\n },\n \"leaderboard_math_num_theory_hard\": {\n \"exact_match,none\"\
: 0.045454545454545456,\n \"exact_match_stderr,none\": 0.016839967582612473,\n\
\ \"alias\": \" - leaderboard_math_num_theory_hard\"\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"exact_match,none\": 0.12435233160621761,\n \"exact_match_stderr,none\"\
: 0.02381447708659356,\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
\n },\n \"leaderboard_math_precalculus_hard\": {\n \"exact_match,none\"\
: 0.044444444444444446,\n \"exact_match_stderr,none\": 0.01780263602032456,\n\
\ \"alias\": \" - leaderboard_math_precalculus_hard\"\n },\n \"leaderboard_mmlu_pro\"\
: {\n \"acc,none\": 0.4432347074468085,\n \"acc_stderr,none\": 0.004528997637022319,\n\
\ \"alias\": \" - leaderboard_mmlu_pro\"\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.4497354497354497,\n \"acc_norm_stderr,none\"\
: 0.017907859743332224,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"acc_norm,none\": 0.54,\n\
\ \"acc_norm_stderr,none\": 0.031584653891499004,\n \"alias\": \"\
\ - leaderboard_musr_murder_mysteries\"\n },\n \"leaderboard_musr_object_placements\"\
: {\n \"acc_norm,none\": 0.3515625,\n \"acc_norm_stderr,none\": 0.029899590697818237,\n\
\ \"alias\": \" - leaderboard_musr_object_placements\"\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"acc_norm,none\": 0.46,\n \"acc_norm_stderr,none\": 0.031584653891499,\n\
\ \"alias\": \" - leaderboard_musr_team_allocation\"\n }\n}\n```"
repo_url: https://huggingface.co/Cran-May/T.E-8.1
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_date_understanding
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_navigate
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_navigate_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_object_counting
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_object_counting_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_ruin_names
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_snarks
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_snarks_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_gpqa_diamond
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_gpqa_diamond_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_gpqa_extended
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_gpqa_extended_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_gpqa_main
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_gpqa_main_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_ifeval
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_ifeval_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_math_algebra_hard
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_math_algebra_hard_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_math_geometry_hard
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_math_geometry_hard_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_math_num_theory_hard
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_math_precalculus_hard
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_mmlu_pro
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_mmlu_pro_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_musr_object_placements
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_musr_object_placements_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-09-29T18-25-58.538578.jsonl'
- config_name: Cran-May__T.E-8.1__leaderboard_musr_team_allocation
data_files:
- split: 2024_09_29T18_25_58.538578
path:
- '**/samples_leaderboard_musr_team_allocation_2024-09-29T18-25-58.538578.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-09-29T18-25-58.538578.jsonl'
---
# Dataset Card for Evaluation run of Cran-May/T.E-8.1
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [Cran-May/T.E-8.1](https://huggingface.co/Cran-May/T.E-8.1)
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/Cran-May__T.E-8.1-details",
name="Cran-May__T.E-8.1__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
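Each split loads as a regular `datasets.Dataset`, so the per-sample records can be inspected directly. A minimal sketch, continuing from the snippet above (the column names are task-specific, so none are assumed here):
```python
# Continuing from the loading snippet above: peek at the schema and one record.
print(data.column_names)  # task-specific fields for this eval
print(data[0])            # the first evaluated sample in the "latest" split
```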
## Latest results
These are the [latest results from run 2024-09-29T18-25-58.538578](https://huggingface.co/datasets/open-llm-leaderboard/Cran-May__T.E-8.1-details/blob/main/Cran-May__T.E-8.1/results_2024-09-29T18-25-58.538578.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and in the "latest" split for each eval):
```python
{
"all": {
"leaderboard": {
"inst_level_strict_acc,none": 0.7517985611510791,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.6635859519408502,
"prompt_level_strict_acc_stderr,none": 0.02033240600470129,
"acc,none": 0.4432347074468085,
"acc_stderr,none": 0.004528997637022319,
"acc_norm,none": 0.5078479699053055,
"acc_norm_stderr,none": 0.005366506638519437,
"prompt_level_loose_acc,none": 0.6746765249537893,
"prompt_level_loose_acc_stderr,none": 0.020160839912603132,
"inst_level_loose_acc,none": 0.7613908872901679,
"inst_level_loose_acc_stderr,none": "N/A",
"exact_match,none": 0.0634441087613293,
"exact_match_stderr,none": 0.00659150584844867,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.555806283631314,
"acc_norm_stderr,none": 0.006190036548895745,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"acc_norm,none": 0.876,
"acc_norm_stderr,none": 0.02088638225867326,
"alias": " - leaderboard_bbh_boolean_expressions"
},
"leaderboard_bbh_causal_judgement": {
"acc_norm,none": 0.5561497326203209,
"acc_norm_stderr,none": 0.03642987131924727,
"alias": " - leaderboard_bbh_causal_judgement"
},
"leaderboard_bbh_date_understanding": {
"acc_norm,none": 0.584,
"acc_norm_stderr,none": 0.031235856237014553,
"alias": " - leaderboard_bbh_date_understanding"
},
"leaderboard_bbh_disambiguation_qa": {
"acc_norm,none": 0.628,
"acc_norm_stderr,none": 0.030630325944558317,
"alias": " - leaderboard_bbh_disambiguation_qa"
},
"leaderboard_bbh_formal_fallacies": {
"acc_norm,none": 0.608,
"acc_norm_stderr,none": 0.030938207620401195,
"alias": " - leaderboard_bbh_formal_fallacies"
},
"leaderboard_bbh_geometric_shapes": {
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010742,
"alias": " - leaderboard_bbh_geometric_shapes"
},
"leaderboard_bbh_hyperbaton": {
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.031514387611153515,
"alias": " - leaderboard_bbh_hyperbaton"
},
"leaderboard_bbh_logical_deduction_five_objects": {
"acc_norm,none": 0.564,
"acc_norm_stderr,none": 0.03142556706028128,
"alias": " - leaderboard_bbh_logical_deduction_five_objects"
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"acc_norm,none": 0.48,
"acc_norm_stderr,none": 0.031660853408495185,
"alias": " - leaderboard_bbh_logical_deduction_seven_objects"
},
"leaderboard_bbh_logical_deduction_three_objects": {
"acc_norm,none": 0.784,
"acc_norm_stderr,none": 0.02607865766373272,
"alias": " - leaderboard_bbh_logical_deduction_three_objects"
},
"leaderboard_bbh_movie_recommendation": {
"acc_norm,none": 0.644,
"acc_norm_stderr,none": 0.03034368065715321,
"alias": " - leaderboard_bbh_movie_recommendation"
},
"leaderboard_bbh_navigate": {
"acc_norm,none": 0.7,
"acc_norm_stderr,none": 0.02904089347757586,
"alias": " - leaderboard_bbh_navigate"
},
"leaderboard_bbh_object_counting": {
"acc_norm,none": 0.372,
"acc_norm_stderr,none": 0.030630325944558313,
"alias": " - leaderboard_bbh_object_counting"
},
"leaderboard_bbh_penguins_in_a_table": {
"acc_norm,none": 0.5753424657534246,
"acc_norm_stderr,none": 0.04104862657656194,
"alias": " - leaderboard_bbh_penguins_in_a_table"
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"acc_norm,none": 0.64,
"acc_norm_stderr,none": 0.030418764025174988,
"alias": " - leaderboard_bbh_reasoning_about_colored_objects"
},
"leaderboard_bbh_ruin_names": {
"acc_norm,none": 0.596,
"acc_norm_stderr,none": 0.031096688184825295,
"alias": " - leaderboard_bbh_ruin_names"
},
"leaderboard_bbh_salient_translation_error_detection": {
"acc_norm,none": 0.528,
"acc_norm_stderr,none": 0.0316364895315444,
"alias": " - leaderboard_bbh_salient_translation_error_detection"
},
"leaderboard_bbh_snarks": {
"acc_norm,none": 0.7247191011235955,
"acc_norm_stderr,none": 0.03357269922538226,
"alias": " - leaderboard_bbh_snarks"
},
"leaderboard_bbh_sports_understanding": {
"acc_norm,none": 0.724,
"acc_norm_stderr,none": 0.028328537274211363,
"alias": " - leaderboard_bbh_sports_understanding"
},
"leaderboard_bbh_temporal_sequences": {
"acc_norm,none": 0.5,
"acc_norm_stderr,none": 0.031686212526223896,
"alias": " - leaderboard_bbh_temporal_sequences"
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"acc_norm,none": 0.232,
"acc_norm_stderr,none": 0.026750070374865167,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"acc_norm,none": 0.168,
"acc_norm_stderr,none": 0.0236928132054926,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"acc_norm,none": 0.284,
"acc_norm_stderr,none": 0.02857695873043741,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects"
},
"leaderboard_bbh_web_of_lies": {
"acc_norm,none": 0.56,
"acc_norm_stderr,none": 0.031457244522235646,
"alias": " - leaderboard_bbh_web_of_lies"
},
"leaderboard_gpqa": {
"acc_norm,none": 0.31291946308724833,
"acc_norm_stderr,none": 0.013437044976303247,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"acc_norm,none": 0.2727272727272727,
"acc_norm_stderr,none": 0.03173071239071724,
"alias": " - leaderboard_gpqa_diamond"
},
"leaderboard_gpqa_extended": {
"acc_norm,none": 0.32051282051282054,
"acc_norm_stderr,none": 0.019990105460697117,
"alias": " - leaderboard_gpqa_extended"
},
"leaderboard_gpqa_main": {
"acc_norm,none": 0.32142857142857145,
"acc_norm_stderr,none": 0.022089519157170164,
"alias": " - leaderboard_gpqa_main"
},
"leaderboard_ifeval": {
"prompt_level_strict_acc,none": 0.6635859519408502,
"prompt_level_strict_acc_stderr,none": 0.02033240600470129,
"inst_level_strict_acc,none": 0.7517985611510791,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.6746765249537893,
"prompt_level_loose_acc_stderr,none": 0.020160839912603132,
"inst_level_loose_acc,none": 0.7613908872901679,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": " - leaderboard_ifeval"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0634441087613293,
"exact_match_stderr,none": 0.00659150584844867,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"exact_match,none": 0.12052117263843648,
"exact_match_stderr,none": 0.0186115976549282,
"alias": " - leaderboard_math_algebra_hard"
},
"leaderboard_math_counting_and_prob_hard": {
"exact_match,none": 0.016260162601626018,
"exact_match_stderr,none": 0.011450452676925661,
"alias": " - leaderboard_math_counting_and_prob_hard"
},
"leaderboard_math_geometry_hard": {
"exact_match,none": 0.030303030303030304,
"exact_match_stderr,none": 0.014977019714308249,
"alias": " - leaderboard_math_geometry_hard"
},
"leaderboard_math_intermediate_algebra_hard": {
"exact_match,none": 0.014285714285714285,
"exact_match_stderr,none": 0.007104350893915322,
"alias": " - leaderboard_math_intermediate_algebra_hard"
},
"leaderboard_math_num_theory_hard": {
"exact_match,none": 0.045454545454545456,
"exact_match_stderr,none": 0.016839967582612473,
"alias": " - leaderboard_math_num_theory_hard"
},
"leaderboard_math_prealgebra_hard": {
"exact_match,none": 0.12435233160621761,
"exact_match_stderr,none": 0.02381447708659356,
"alias": " - leaderboard_math_prealgebra_hard"
},
"leaderboard_math_precalculus_hard": {
"exact_match,none": 0.044444444444444446,
"exact_match_stderr,none": 0.01780263602032456,
"alias": " - leaderboard_math_precalculus_hard"
},
"leaderboard_mmlu_pro": {
"acc,none": 0.4432347074468085,
"acc_stderr,none": 0.004528997637022319,
"alias": " - leaderboard_mmlu_pro"
},
"leaderboard_musr": {
"acc_norm,none": 0.4497354497354497,
"acc_norm_stderr,none": 0.017907859743332224,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"acc_norm,none": 0.54,
"acc_norm_stderr,none": 0.031584653891499004,
"alias": " - leaderboard_musr_murder_mysteries"
},
"leaderboard_musr_object_placements": {
"acc_norm,none": 0.3515625,
"acc_norm_stderr,none": 0.029899590697818237,
"alias": " - leaderboard_musr_object_placements"
},
"leaderboard_musr_team_allocation": {
"acc_norm,none": 0.46,
"acc_norm_stderr,none": 0.031584653891499,
"alias": " - leaderboard_musr_team_allocation"
}
},
"leaderboard": {
"inst_level_strict_acc,none": 0.7517985611510791,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.6635859519408502,
"prompt_level_strict_acc_stderr,none": 0.02033240600470129,
"acc,none": 0.4432347074468085,
"acc_stderr,none": 0.004528997637022319,
"acc_norm,none": 0.5078479699053055,
"acc_norm_stderr,none": 0.005366506638519437,
"prompt_level_loose_acc,none": 0.6746765249537893,
"prompt_level_loose_acc_stderr,none": 0.020160839912603132,
"inst_level_loose_acc,none": 0.7613908872901679,
"inst_level_loose_acc_stderr,none": "N/A",
"exact_match,none": 0.0634441087613293,
"exact_match_stderr,none": 0.00659150584844867,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.555806283631314,
"acc_norm_stderr,none": 0.006190036548895745,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"acc_norm,none": 0.876,
"acc_norm_stderr,none": 0.02088638225867326,
"alias": " - leaderboard_bbh_boolean_expressions"
},
"leaderboard_bbh_causal_judgement": {
"acc_norm,none": 0.5561497326203209,
"acc_norm_stderr,none": 0.03642987131924727,
"alias": " - leaderboard_bbh_causal_judgement"
},
"leaderboard_bbh_date_understanding": {
"acc_norm,none": 0.584,
"acc_norm_stderr,none": 0.031235856237014553,
"alias": " - leaderboard_bbh_date_understanding"
},
"leaderboard_bbh_disambiguation_qa": {
"acc_norm,none": 0.628,
"acc_norm_stderr,none": 0.030630325944558317,
"alias": " - leaderboard_bbh_disambiguation_qa"
},
"leaderboard_bbh_formal_fallacies": {
"acc_norm,none": 0.608,
"acc_norm_stderr,none": 0.030938207620401195,
"alias": " - leaderboard_bbh_formal_fallacies"
},
"leaderboard_bbh_geometric_shapes": {
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010742,
"alias": " - leaderboard_bbh_geometric_shapes"
},
"leaderboard_bbh_hyperbaton": {
"acc_norm,none": 0.552,
"acc_norm_stderr,none": 0.031514387611153515,
"alias": " - leaderboard_bbh_hyperbaton"
},
"leaderboard_bbh_logical_deduction_five_objects": {
"acc_norm,none": 0.564,
"acc_norm_stderr,none": 0.03142556706028128,
"alias": " - leaderboard_bbh_logical_deduction_five_objects"
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"acc_norm,none": 0.48,
"acc_norm_stderr,none": 0.031660853408495185,
"alias": " - leaderboard_bbh_logical_deduction_seven_objects"
},
"leaderboard_bbh_logical_deduction_three_objects": {
"acc_norm,none": 0.784,
"acc_norm_stderr,none": 0.02607865766373272,
"alias": " - leaderboard_bbh_logical_deduction_three_objects"
},
"leaderboard_bbh_movie_recommendation": {
"acc_norm,none": 0.644,
"acc_norm_stderr,none": 0.03034368065715321,
"alias": " - leaderboard_bbh_movie_recommendation"
},
"leaderboard_bbh_navigate": {
"acc_norm,none": 0.7,
"acc_norm_stderr,none": 0.02904089347757586,
"alias": " - leaderboard_bbh_navigate"
},
"leaderboard_bbh_object_counting": {
"acc_norm,none": 0.372,
"acc_norm_stderr,none": 0.030630325944558313,
"alias": " - leaderboard_bbh_object_counting"
},
"leaderboard_bbh_penguins_in_a_table": {
"acc_norm,none": 0.5753424657534246,
"acc_norm_stderr,none": 0.04104862657656194,
"alias": " - leaderboard_bbh_penguins_in_a_table"
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"acc_norm,none": 0.64,
"acc_norm_stderr,none": 0.030418764025174988,
"alias": " - leaderboard_bbh_reasoning_about_colored_objects"
},
"leaderboard_bbh_ruin_names": {
"acc_norm,none": 0.596,
"acc_norm_stderr,none": 0.031096688184825295,
"alias": " - leaderboard_bbh_ruin_names"
},
"leaderboard_bbh_salient_translation_error_detection": {
"acc_norm,none": 0.528,
"acc_norm_stderr,none": 0.0316364895315444,
"alias": " - leaderboard_bbh_salient_translation_error_detection"
},
"leaderboard_bbh_snarks": {
"acc_norm,none": 0.7247191011235955,
"acc_norm_stderr,none": 0.03357269922538226,
"alias": " - leaderboard_bbh_snarks"
},
"leaderboard_bbh_sports_understanding": {
"acc_norm,none": 0.724,
"acc_norm_stderr,none": 0.028328537274211363,
"alias": " - leaderboard_bbh_sports_understanding"
},
"leaderboard_bbh_temporal_sequences": {
"acc_norm,none": 0.5,
"acc_norm_stderr,none": 0.031686212526223896,
"alias": " - leaderboard_bbh_temporal_sequences"
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"acc_norm,none": 0.232,
"acc_norm_stderr,none": 0.026750070374865167,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"acc_norm,none": 0.168,
"acc_norm_stderr,none": 0.0236928132054926,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"acc_norm,none": 0.284,
"acc_norm_stderr,none": 0.02857695873043741,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects"
},
"leaderboard_bbh_web_of_lies": {
"acc_norm,none": 0.56,
"acc_norm_stderr,none": 0.031457244522235646,
"alias": " - leaderboard_bbh_web_of_lies"
},
"leaderboard_gpqa": {
"acc_norm,none": 0.31291946308724833,
"acc_norm_stderr,none": 0.013437044976303247,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"acc_norm,none": 0.2727272727272727,
"acc_norm_stderr,none": 0.03173071239071724,
"alias": " - leaderboard_gpqa_diamond"
},
"leaderboard_gpqa_extended": {
"acc_norm,none": 0.32051282051282054,
"acc_norm_stderr,none": 0.019990105460697117,
"alias": " - leaderboard_gpqa_extended"
},
"leaderboard_gpqa_main": {
"acc_norm,none": 0.32142857142857145,
"acc_norm_stderr,none": 0.022089519157170164,
"alias": " - leaderboard_gpqa_main"
},
"leaderboard_ifeval": {
"prompt_level_strict_acc,none": 0.6635859519408502,
"prompt_level_strict_acc_stderr,none": 0.02033240600470129,
"inst_level_strict_acc,none": 0.7517985611510791,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.6746765249537893,
"prompt_level_loose_acc_stderr,none": 0.020160839912603132,
"inst_level_loose_acc,none": 0.7613908872901679,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": " - leaderboard_ifeval"
},
"leaderboard_math_hard": {
"exact_match,none": 0.0634441087613293,
"exact_match_stderr,none": 0.00659150584844867,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"exact_match,none": 0.12052117263843648,
"exact_match_stderr,none": 0.0186115976549282,
"alias": " - leaderboard_math_algebra_hard"
},
"leaderboard_math_counting_and_prob_hard": {
"exact_match,none": 0.016260162601626018,
"exact_match_stderr,none": 0.011450452676925661,
"alias": " - leaderboard_math_counting_and_prob_hard"
},
"leaderboard_math_geometry_hard": {
"exact_match,none": 0.030303030303030304,
"exact_match_stderr,none": 0.014977019714308249,
"alias": " - leaderboard_math_geometry_hard"
},
"leaderboard_math_intermediate_algebra_hard": {
"exact_match,none": 0.014285714285714285,
"exact_match_stderr,none": 0.007104350893915322,
"alias": " - leaderboard_math_intermediate_algebra_hard"
},
"leaderboard_math_num_theory_hard": {
"exact_match,none": 0.045454545454545456,
"exact_match_stderr,none": 0.016839967582612473,
"alias": " - leaderboard_math_num_theory_hard"
},
"leaderboard_math_prealgebra_hard": {
"exact_match,none": 0.12435233160621761,
"exact_match_stderr,none": 0.02381447708659356,
"alias": " - leaderboard_math_prealgebra_hard"
},
"leaderboard_math_precalculus_hard": {
"exact_match,none": 0.044444444444444446,
"exact_match_stderr,none": 0.01780263602032456,
"alias": " - leaderboard_math_precalculus_hard"
},
"leaderboard_mmlu_pro": {
"acc,none": 0.4432347074468085,
"acc_stderr,none": 0.004528997637022319,
"alias": " - leaderboard_mmlu_pro"
},
"leaderboard_musr": {
"acc_norm,none": 0.4497354497354497,
"acc_norm_stderr,none": 0.017907859743332224,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"acc_norm,none": 0.54,
"acc_norm_stderr,none": 0.031584653891499004,
"alias": " - leaderboard_musr_murder_mysteries"
},
"leaderboard_musr_object_placements": {
"acc_norm,none": 0.3515625,
"acc_norm_stderr,none": 0.029899590697818237,
"alias": " - leaderboard_musr_object_placements"
},
"leaderboard_musr_team_allocation": {
"acc_norm,none": 0.46,
"acc_norm_stderr,none": 0.031584653891499,
"alias": " - leaderboard_musr_team_allocation"
}
}
```
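The aggregated results file can also be fetched directly with `huggingface_hub`; a minimal sketch, using the filename from the link above (the top-level layout of the JSON may differ slightly from the excerpt shown here, so this just lists the keys before drilling in):
```python
import json

from huggingface_hub import hf_hub_download

# Download the aggregated results JSON referenced in "Latest results" above.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/Cran-May__T.E-8.1-details",
    filename="Cran-May__T.E-8.1/results_2024-09-29T18-25-58.538578.json",
    repo_type="dataset",
)
with open(path) as f:
    results = json.load(f)

print(list(results.keys()))  # inspect the top-level structure first
```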
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases, and limitations of the dataset. More information is needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
DaniilOr/reverse | DaniilOr | "2024-09-29T20:14:02Z" | 0 | 0 | [
"license:mit",
"size_categories:1M<n<10M",
"format:csv",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T18:26:20Z" | ---
license: mit
---
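The card carries only a license, so as a hedged sketch: a CSV-backed Hub repo of this shape should load directly with the `datasets` library (the split layout is an assumption to verify):
```python
# Minimal sketch: load the CSV-backed dataset straight from the Hub.
# Split names are not documented; print the DatasetDict to confirm them.
from datasets import load_dataset

ds = load_dataset("DaniilOr/reverse")
print(ds)
```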
|
naskimed/testing_data | naskimed | "2024-09-29T18:42:33Z" | 0 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T18:42:30Z" | ---
dataset_info:
features:
- name: text
dtype: string
splits:
- name: train
num_bytes: 4599554
num_examples: 2013
download_size: 2125662
dataset_size: 4599554
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
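The card ships no usage notes; a minimal sketch grounded in the `dataset_info` above (a single `train` split of 2013 rows with one string column, `text`):
```python
# Sketch: load the declared train split and preview the text column.
from datasets import load_dataset

ds = load_dataset("naskimed/testing_data", split="train")
print(ds.num_rows)           # 2013 examples per dataset_info
print(ds[0]["text"][:200])   # preview the first record
```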
|
open-llm-leaderboard/amd__AMD-Llama-135m-details | open-llm-leaderboard | "2024-09-29T18:46:35Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T18:43:24Z" | ---
pretty_name: Evaluation run of amd/AMD-Llama-135m
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [amd/AMD-Llama-135m](https://huggingface.co/amd/AMD-Llama-135m)\nThe dataset is\
\ composed of 38 configuration(s), each one corresponding to one of the evaluated\
\ task.\n\nThe dataset has been created from 1 run(s). Each run can be found as\
\ a specific split in each configuration, the split being named using the timestamp\
\ of the run.The \"train\" split is always pointing to the latest results.\n\nAn\
\ additional configuration \"results\" store all the aggregated results of the run.\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/amd__AMD-Llama-135m-details\"\
,\n\tname=\"amd__AMD-Llama-135m__leaderboard_bbh_boolean_expressions\",\n\tsplit=\"\
latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results from run\
\ 2024-09-29T18-43-23.577285](https://huggingface.co/datasets/open-llm-leaderboard/amd__AMD-Llama-135m-details/blob/main/amd__AMD-Llama-135m/results_2024-09-29T18-43-23.577285.json)\
\ (note that there might be results for other tasks in the repo if successive evals\
\ didn't cover the same tasks. You can find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"acc_norm,none\": 0.2956284861849786,\n \"acc_norm_stderr,none\"\
: 0.004947686335151244,\n \"inst_level_loose_acc,none\": 0.2482014388489209,\n\
\ \"inst_level_loose_acc_stderr,none\": \"N/A\",\n \"exact_match,none\"\
: 0.005287009063444109,\n \"exact_match_stderr,none\": 0.001996953981139152,\n\
\ \"prompt_level_loose_acc,none\": 0.12754158964879853,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.014354940597336746,\n \"\
prompt_level_strict_acc,none\": 0.12384473197781885,\n \"prompt_level_strict_acc_stderr,none\"\
: 0.014175305492766679,\n \"inst_level_strict_acc,none\": 0.2446043165467626,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"acc,none\"\
: 0.11685505319148937,\n \"acc_stderr,none\": 0.0029287943595543192,\n\
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.2938725915639646,\n \"acc_norm_stderr,none\"\
: 0.005652221363141021,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"acc_norm,none\"\
: 0.46,\n \"acc_norm_stderr,none\": 0.031584653891499004,\n \
\ \"alias\": \" - leaderboard_bbh_boolean_expressions\"\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"acc_norm,none\": 0.5187165775401069,\n\
\ \"acc_norm_stderr,none\": 0.03663608375537843,\n \"alias\"\
: \" - leaderboard_bbh_causal_judgement\"\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"acc_norm,none\": 0.2,\n \"acc_norm_stderr,none\":\
\ 0.02534897002097912,\n \"alias\": \" - leaderboard_bbh_date_understanding\"\
\n },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"acc_norm,none\"\
: 0.316,\n \"acc_norm_stderr,none\": 0.029462657598578648,\n \
\ \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\n },\n \"\
leaderboard_bbh_formal_fallacies\": {\n \"acc_norm,none\": 0.532,\n \
\ \"acc_norm_stderr,none\": 0.031621252575725574,\n \"alias\"\
: \" - leaderboard_bbh_formal_fallacies\"\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"acc_norm,none\": 0.088,\n \"acc_norm_stderr,none\"\
: 0.017953084777052892,\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
\n },\n \"leaderboard_bbh_hyperbaton\": {\n \"acc_norm,none\"\
: 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743,\n \
\ \"alias\": \" - leaderboard_bbh_hyperbaton\"\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"acc_norm,none\": 0.204,\n \"acc_norm_stderr,none\"\
: 0.025537121574548162,\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \
\ \"acc_norm,none\": 0.14,\n \"acc_norm_stderr,none\": 0.021989409645240245,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"acc_norm,none\": 0.328,\n \"acc_norm_stderr,none\": 0.029752391824475363,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
acc_norm,none\": 0.28,\n \"acc_norm_stderr,none\": 0.02845414827783231,\n\
\ \"alias\": \" - leaderboard_bbh_movie_recommendation\"\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"acc_norm,none\": 0.42,\n\
\ \"acc_norm_stderr,none\": 0.03127799950463661,\n \"alias\"\
: \" - leaderboard_bbh_navigate\"\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"acc_norm,none\": 0.0,\n \"acc_norm_stderr,none\":\
\ 0.0,\n \"alias\": \" - leaderboard_bbh_object_counting\"\n \
\ },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"acc_norm,none\"\
: 0.19863013698630136,\n \"acc_norm_stderr,none\": 0.03313256608889815,\n\
\ \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\n },\n\
\ \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \"\
acc_norm,none\": 0.128,\n \"acc_norm_stderr,none\": 0.021172081336336534,\n\
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"acc_norm,none\"\
: 0.236,\n \"acc_norm_stderr,none\": 0.026909337594953852,\n \
\ \"alias\": \" - leaderboard_bbh_ruin_names\"\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"acc_norm,none\": 0.12,\n \"acc_norm_stderr,none\"\
: 0.020593600596839998,\n \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\
\n },\n \"leaderboard_bbh_snarks\": {\n \"acc_norm,none\"\
: 0.5280898876404494,\n \"acc_norm_stderr,none\": 0.03752294651708463,\n\
\ \"alias\": \" - leaderboard_bbh_snarks\"\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"acc_norm,none\": 0.46,\n \"acc_norm_stderr,none\"\
: 0.031584653891499004,\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
\n },\n \"leaderboard_bbh_temporal_sequences\": {\n \"\
acc_norm,none\": 0.284,\n \"acc_norm_stderr,none\": 0.02857695873043744,\n\
\ \"alias\": \" - leaderboard_bbh_temporal_sequences\"\n },\n\
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"acc_norm,none\": 0.208,\n \"acc_norm_stderr,none\": 0.02572139890141637,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\"\
: 0.022752024491765464,\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\"\
: 0.029844039047465857,\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
\n },\n \"leaderboard_bbh_web_of_lies\": {\n \"acc_norm,none\"\
: 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254714,\n \
\ \"alias\": \" - leaderboard_bbh_web_of_lies\"\n },\n \"leaderboard_gpqa\"\
: {\n \"acc_norm,none\": 0.2525167785234899,\n \"acc_norm_stderr,none\"\
: 0.012593416400106734,\n \"alias\": \" - leaderboard_gpqa\"\n \
\ },\n \"leaderboard_gpqa_diamond\": {\n \"acc_norm,none\": 0.2828282828282828,\n\
\ \"acc_norm_stderr,none\": 0.032087795587867514,\n \"alias\"\
: \" - leaderboard_gpqa_diamond\"\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"acc_norm,none\": 0.24725274725274726,\n \"acc_norm_stderr,none\"\
: 0.018479784453482837,\n \"alias\": \" - leaderboard_gpqa_extended\"\
\n },\n \"leaderboard_gpqa_main\": {\n \"acc_norm,none\"\
: 0.24553571428571427,\n \"acc_norm_stderr,none\": 0.020357428454484603,\n\
\ \"alias\": \" - leaderboard_gpqa_main\"\n },\n \"leaderboard_ifeval\"\
: {\n \"prompt_level_strict_acc,none\": 0.12384473197781885,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.014175305492766679,\n \
\ \"inst_level_strict_acc,none\": 0.2446043165467626,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.12754158964879853,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.014354940597336746,\n \
\ \"inst_level_loose_acc,none\": 0.24820143884892087,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"alias\": \" - leaderboard_ifeval\"\n },\n \
\ \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.005287009063444109,\n\
\ \"exact_match_stderr,none\": 0.001996953981139152,\n \"\
alias\": \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\"\
: {\n \"exact_match,none\": 0.006514657980456026,\n \"exact_match_stderr,none\"\
: 0.004599025618546258,\n \"alias\": \" - leaderboard_math_algebra_hard\"\
\n },\n \"leaderboard_math_counting_and_prob_hard\": {\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0,\n\
\ \"alias\": \" - leaderboard_math_counting_and_prob_hard\"\n \
\ },\n \"leaderboard_math_geometry_hard\": {\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0,\n \"alias\": \"\
\ - leaderboard_math_geometry_hard\"\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"exact_match,none\": 0.007142857142857143,\n \"exact_match_stderr,none\"\
: 0.005041703051390571,\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\"\
\n },\n \"leaderboard_math_num_theory_hard\": {\n \"exact_match,none\"\
: 0.006493506493506494,\n \"exact_match_stderr,none\": 0.006493506493506494,\n\
\ \"alias\": \" - leaderboard_math_num_theory_hard\"\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"exact_match,none\"\
: 0.0051813471502590676,\n \"exact_match_stderr,none\": 0.0051813471502590676,\n\
\ \"alias\": \" - leaderboard_math_prealgebra_hard\"\n },\n \
\ \"leaderboard_math_precalculus_hard\": {\n \"exact_match,none\"\
: 0.007407407407407408,\n \"exact_match_stderr,none\": 0.007407407407407408,\n\
\ \"alias\": \" - leaderboard_math_precalculus_hard\"\n },\n\
\ \"leaderboard_mmlu_pro\": {\n \"acc,none\": 0.11685505319148937,\n\
\ \"acc_stderr,none\": 0.002928794359554319,\n \"alias\":\
\ \" - leaderboard_mmlu_pro\"\n },\n \"leaderboard_musr\": {\n \
\ \"acc_norm,none\": 0.376984126984127,\n \"acc_norm_stderr,none\"\
: 0.017204281211768292,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"acc_norm,none\"\
: 0.52,\n \"acc_norm_stderr,none\": 0.03166085340849512,\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\"\n },\n \"\
leaderboard_musr_object_placements\": {\n \"acc_norm,none\": 0.25390625,\n\
\ \"acc_norm_stderr,none\": 0.027256074423586178,\n \"alias\"\
: \" - leaderboard_musr_object_placements\"\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"acc_norm,none\": 0.36,\n \"acc_norm_stderr,none\"\
: 0.03041876402517494,\n \"alias\": \" - leaderboard_musr_team_allocation\"\
\n }\n },\n \"leaderboard\": {\n \"acc_norm,none\": 0.2956284861849786,\n\
\ \"acc_norm_stderr,none\": 0.004947686335151244,\n \"inst_level_loose_acc,none\"\
: 0.2482014388489209,\n \"inst_level_loose_acc_stderr,none\": \"N/A\",\n\
\ \"exact_match,none\": 0.005287009063444109,\n \"exact_match_stderr,none\"\
: 0.001996953981139152,\n \"prompt_level_loose_acc,none\": 0.12754158964879853,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.014354940597336746,\n \
\ \"prompt_level_strict_acc,none\": 0.12384473197781885,\n \"prompt_level_strict_acc_stderr,none\"\
: 0.014175305492766679,\n \"inst_level_strict_acc,none\": 0.2446043165467626,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"acc,none\":\
\ 0.11685505319148937,\n \"acc_stderr,none\": 0.0029287943595543192,\n \
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \"\
acc_norm,none\": 0.2938725915639646,\n \"acc_norm_stderr,none\": 0.005652221363141021,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"acc_norm,none\": 0.46,\n \"acc_norm_stderr,none\": 0.031584653891499004,\n\
\ \"alias\": \" - leaderboard_bbh_boolean_expressions\"\n },\n \"\
leaderboard_bbh_causal_judgement\": {\n \"acc_norm,none\": 0.5187165775401069,\n\
\ \"acc_norm_stderr,none\": 0.03663608375537843,\n \"alias\": \" \
\ - leaderboard_bbh_causal_judgement\"\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"acc_norm,none\": 0.2,\n \"acc_norm_stderr,none\": 0.02534897002097912,\n\
\ \"alias\": \" - leaderboard_bbh_date_understanding\"\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"acc_norm,none\": 0.316,\n \"acc_norm_stderr,none\": 0.029462657598578648,\n\
\ \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\n },\n \"leaderboard_bbh_formal_fallacies\"\
: {\n \"acc_norm,none\": 0.532,\n \"acc_norm_stderr,none\": 0.031621252575725574,\n\
\ \"alias\": \" - leaderboard_bbh_formal_fallacies\"\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"acc_norm,none\": 0.088,\n \"acc_norm_stderr,none\": 0.017953084777052892,\n\
\ \"alias\": \" - leaderboard_bbh_geometric_shapes\"\n },\n \"leaderboard_bbh_hyperbaton\"\
: {\n \"acc_norm,none\": 0.516,\n \"acc_norm_stderr,none\": 0.03166998503010743,\n\
\ \"alias\": \" - leaderboard_bbh_hyperbaton\"\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"acc_norm,none\": 0.204,\n \"acc_norm_stderr,none\": 0.025537121574548162,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\n \
\ },\n \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"acc_norm,none\"\
: 0.14,\n \"acc_norm_stderr,none\": 0.021989409645240245,\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_seven_objects\"\n },\n \"leaderboard_bbh_logical_deduction_three_objects\"\
: {\n \"acc_norm,none\": 0.328,\n \"acc_norm_stderr,none\": 0.029752391824475363,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\n \
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"acc_norm,none\"\
: 0.28,\n \"acc_norm_stderr,none\": 0.02845414827783231,\n \"alias\"\
: \" - leaderboard_bbh_movie_recommendation\"\n },\n \"leaderboard_bbh_navigate\"\
: {\n \"acc_norm,none\": 0.42,\n \"acc_norm_stderr,none\": 0.03127799950463661,\n\
\ \"alias\": \" - leaderboard_bbh_navigate\"\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"acc_norm,none\": 0.0,\n \"acc_norm_stderr,none\": 0.0,\n \
\ \"alias\": \" - leaderboard_bbh_object_counting\"\n },\n \"leaderboard_bbh_penguins_in_a_table\"\
: {\n \"acc_norm,none\": 0.19863013698630136,\n \"acc_norm_stderr,none\"\
: 0.03313256608889815,\n \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\
\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \"\
acc_norm,none\": 0.128,\n \"acc_norm_stderr,none\": 0.021172081336336534,\n\
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\n \
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"acc_norm,none\": 0.236,\n\
\ \"acc_norm_stderr,none\": 0.026909337594953852,\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\"\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"acc_norm,none\": 0.12,\n \"acc_norm_stderr,none\": 0.020593600596839998,\n\
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"acc_norm,none\": 0.5280898876404494,\n\
\ \"acc_norm_stderr,none\": 0.03752294651708463,\n \"alias\": \" \
\ - leaderboard_bbh_snarks\"\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"acc_norm,none\": 0.46,\n \"acc_norm_stderr,none\": 0.031584653891499004,\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\"\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"acc_norm,none\": 0.284,\n \
\ \"acc_norm_stderr,none\": 0.02857695873043744,\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"acc_norm,none\": 0.208,\n \"acc_norm_stderr,none\": 0.02572139890141637,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\": 0.022752024491765464,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\": 0.029844039047465857,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
\n },\n \"leaderboard_bbh_web_of_lies\": {\n \"acc_norm,none\": 0.488,\n\
\ \"acc_norm_stderr,none\": 0.03167708558254714,\n \"alias\": \" \
\ - leaderboard_bbh_web_of_lies\"\n },\n \"leaderboard_gpqa\": {\n \
\ \"acc_norm,none\": 0.2525167785234899,\n \"acc_norm_stderr,none\": 0.012593416400106734,\n\
\ \"alias\": \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\"\
: {\n \"acc_norm,none\": 0.2828282828282828,\n \"acc_norm_stderr,none\"\
: 0.032087795587867514,\n \"alias\": \" - leaderboard_gpqa_diamond\"\n \
\ },\n \"leaderboard_gpqa_extended\": {\n \"acc_norm,none\": 0.24725274725274726,\n\
\ \"acc_norm_stderr,none\": 0.018479784453482837,\n \"alias\": \"\
\ - leaderboard_gpqa_extended\"\n },\n \"leaderboard_gpqa_main\": {\n \
\ \"acc_norm,none\": 0.24553571428571427,\n \"acc_norm_stderr,none\"\
: 0.020357428454484603,\n \"alias\": \" - leaderboard_gpqa_main\"\n },\n\
\ \"leaderboard_ifeval\": {\n \"prompt_level_strict_acc,none\": 0.12384473197781885,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.014175305492766679,\n \
\ \"inst_level_strict_acc,none\": 0.2446043165467626,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.12754158964879853,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.014354940597336746,\n \"inst_level_loose_acc,none\"\
: 0.24820143884892087,\n \"inst_level_loose_acc_stderr,none\": \"N/A\",\n\
\ \"alias\": \" - leaderboard_ifeval\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.005287009063444109,\n \"exact_match_stderr,none\"\
: 0.001996953981139152,\n \"alias\": \" - leaderboard_math_hard\"\n },\n\
\ \"leaderboard_math_algebra_hard\": {\n \"exact_match,none\": 0.006514657980456026,\n\
\ \"exact_match_stderr,none\": 0.004599025618546258,\n \"alias\":\
\ \" - leaderboard_math_algebra_hard\"\n },\n \"leaderboard_math_counting_and_prob_hard\"\
: {\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0,\n\
\ \"alias\": \" - leaderboard_math_counting_and_prob_hard\"\n },\n \
\ \"leaderboard_math_geometry_hard\": {\n \"exact_match,none\": 0.0,\n \
\ \"exact_match_stderr,none\": 0.0,\n \"alias\": \" - leaderboard_math_geometry_hard\"\
\n },\n \"leaderboard_math_intermediate_algebra_hard\": {\n \"exact_match,none\"\
: 0.007142857142857143,\n \"exact_match_stderr,none\": 0.005041703051390571,\n\
\ \"alias\": \" - leaderboard_math_intermediate_algebra_hard\"\n },\n\
\ \"leaderboard_math_num_theory_hard\": {\n \"exact_match,none\": 0.006493506493506494,\n\
\ \"exact_match_stderr,none\": 0.006493506493506494,\n \"alias\":\
\ \" - leaderboard_math_num_theory_hard\"\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"exact_match,none\": 0.0051813471502590676,\n \"exact_match_stderr,none\"\
: 0.0051813471502590676,\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
\n },\n \"leaderboard_math_precalculus_hard\": {\n \"exact_match,none\"\
: 0.007407407407407408,\n \"exact_match_stderr,none\": 0.007407407407407408,\n\
\ \"alias\": \" - leaderboard_math_precalculus_hard\"\n },\n \"leaderboard_mmlu_pro\"\
: {\n \"acc,none\": 0.11685505319148937,\n \"acc_stderr,none\": 0.002928794359554319,\n\
\ \"alias\": \" - leaderboard_mmlu_pro\"\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.376984126984127,\n \"acc_norm_stderr,none\"\
: 0.017204281211768292,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"acc_norm,none\": 0.52,\n\
\ \"acc_norm_stderr,none\": 0.03166085340849512,\n \"alias\": \" \
\ - leaderboard_musr_murder_mysteries\"\n },\n \"leaderboard_musr_object_placements\"\
: {\n \"acc_norm,none\": 0.25390625,\n \"acc_norm_stderr,none\": 0.027256074423586178,\n\
\ \"alias\": \" - leaderboard_musr_object_placements\"\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"acc_norm,none\": 0.36,\n \"acc_norm_stderr,none\": 0.03041876402517494,\n\
\ \"alias\": \" - leaderboard_musr_team_allocation\"\n }\n}\n```"
repo_url: https://huggingface.co/amd/AMD-Llama-135m
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_date_understanding
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_navigate
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_navigate_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_object_counting
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_object_counting_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_ruin_names
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_snarks
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_snarks_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_gpqa_diamond
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_gpqa_diamond_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_gpqa_extended
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_gpqa_extended_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_gpqa_main
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_gpqa_main_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_ifeval
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_ifeval_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_math_algebra_hard
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_math_algebra_hard_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_math_geometry_hard
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_math_geometry_hard_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_math_num_theory_hard
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_math_precalculus_hard
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_mmlu_pro
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_mmlu_pro_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_musr_object_placements
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_musr_object_placements_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-09-29T18-43-23.577285.jsonl'
- config_name: amd__AMD-Llama-135m__leaderboard_musr_team_allocation
data_files:
- split: 2024_09_29T18_43_23.577285
path:
- '**/samples_leaderboard_musr_team_allocation_2024-09-29T18-43-23.577285.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-09-29T18-43-23.577285.jsonl'
---
# Dataset Card for Evaluation run of amd/AMD-Llama-135m
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [amd/AMD-Llama-135m](https://huggingface.co/amd/AMD-Llama-135m)
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the most recent results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/amd__AMD-Llama-135m-details",
name="amd__AMD-Llama-135m__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
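Since the repo holds one configuration per task, it can help to enumerate them before loading; a short sketch using the `datasets` helper:
```python
# Sketch: list the per-task configurations in this details repository.
from datasets import get_dataset_config_names

configs = get_dataset_config_names("open-llm-leaderboard/amd__AMD-Llama-135m-details")
print(len(configs))  # expected to match the 38 configurations noted above
print(configs[:3])   # e.g., the leaderboard_bbh_* tasks
```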
## Latest results
These are the [latest results from run 2024-09-29T18-43-23.577285](https://huggingface.co/datasets/open-llm-leaderboard/amd__AMD-Llama-135m-details/blob/main/amd__AMD-Llama-135m/results_2024-09-29T18-43-23.577285.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"leaderboard": {
"acc_norm,none": 0.2956284861849786,
"acc_norm_stderr,none": 0.004947686335151244,
"inst_level_loose_acc,none": 0.2482014388489209,
"inst_level_loose_acc_stderr,none": "N/A",
"exact_match,none": 0.005287009063444109,
"exact_match_stderr,none": 0.001996953981139152,
"prompt_level_loose_acc,none": 0.12754158964879853,
"prompt_level_loose_acc_stderr,none": 0.014354940597336746,
"prompt_level_strict_acc,none": 0.12384473197781885,
"prompt_level_strict_acc_stderr,none": 0.014175305492766679,
"inst_level_strict_acc,none": 0.2446043165467626,
"inst_level_strict_acc_stderr,none": "N/A",
"acc,none": 0.11685505319148937,
"acc_stderr,none": 0.0029287943595543192,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.2938725915639646,
"acc_norm_stderr,none": 0.005652221363141021,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"acc_norm,none": 0.46,
"acc_norm_stderr,none": 0.031584653891499004,
"alias": " - leaderboard_bbh_boolean_expressions"
},
"leaderboard_bbh_causal_judgement": {
"acc_norm,none": 0.5187165775401069,
"acc_norm_stderr,none": 0.03663608375537843,
"alias": " - leaderboard_bbh_causal_judgement"
},
"leaderboard_bbh_date_understanding": {
"acc_norm,none": 0.2,
"acc_norm_stderr,none": 0.02534897002097912,
"alias": " - leaderboard_bbh_date_understanding"
},
"leaderboard_bbh_disambiguation_qa": {
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648,
"alias": " - leaderboard_bbh_disambiguation_qa"
},
"leaderboard_bbh_formal_fallacies": {
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725574,
"alias": " - leaderboard_bbh_formal_fallacies"
},
"leaderboard_bbh_geometric_shapes": {
"acc_norm,none": 0.088,
"acc_norm_stderr,none": 0.017953084777052892,
"alias": " - leaderboard_bbh_geometric_shapes"
},
"leaderboard_bbh_hyperbaton": {
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743,
"alias": " - leaderboard_bbh_hyperbaton"
},
"leaderboard_bbh_logical_deduction_five_objects": {
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162,
"alias": " - leaderboard_bbh_logical_deduction_five_objects"
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240245,
"alias": " - leaderboard_bbh_logical_deduction_seven_objects"
},
"leaderboard_bbh_logical_deduction_three_objects": {
"acc_norm,none": 0.328,
"acc_norm_stderr,none": 0.029752391824475363,
"alias": " - leaderboard_bbh_logical_deduction_three_objects"
},
"leaderboard_bbh_movie_recommendation": {
"acc_norm,none": 0.28,
"acc_norm_stderr,none": 0.02845414827783231,
"alias": " - leaderboard_bbh_movie_recommendation"
},
"leaderboard_bbh_navigate": {
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661,
"alias": " - leaderboard_bbh_navigate"
},
"leaderboard_bbh_object_counting": {
"acc_norm,none": 0.0,
"acc_norm_stderr,none": 0.0,
"alias": " - leaderboard_bbh_object_counting"
},
"leaderboard_bbh_penguins_in_a_table": {
"acc_norm,none": 0.19863013698630136,
"acc_norm_stderr,none": 0.03313256608889815,
"alias": " - leaderboard_bbh_penguins_in_a_table"
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"acc_norm,none": 0.128,
"acc_norm_stderr,none": 0.021172081336336534,
"alias": " - leaderboard_bbh_reasoning_about_colored_objects"
},
"leaderboard_bbh_ruin_names": {
"acc_norm,none": 0.236,
"acc_norm_stderr,none": 0.026909337594953852,
"alias": " - leaderboard_bbh_ruin_names"
},
"leaderboard_bbh_salient_translation_error_detection": {
"acc_norm,none": 0.12,
"acc_norm_stderr,none": 0.020593600596839998,
"alias": " - leaderboard_bbh_salient_translation_error_detection"
},
"leaderboard_bbh_snarks": {
"acc_norm,none": 0.5280898876404494,
"acc_norm_stderr,none": 0.03752294651708463,
"alias": " - leaderboard_bbh_snarks"
},
"leaderboard_bbh_sports_understanding": {
"acc_norm,none": 0.46,
"acc_norm_stderr,none": 0.031584653891499004,
"alias": " - leaderboard_bbh_sports_understanding"
},
"leaderboard_bbh_temporal_sequences": {
"acc_norm,none": 0.284,
"acc_norm_stderr,none": 0.02857695873043744,
"alias": " - leaderboard_bbh_temporal_sequences"
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"acc_norm,none": 0.208,
"acc_norm_stderr,none": 0.02572139890141637,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465857,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects"
},
"leaderboard_bbh_web_of_lies": {
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714,
"alias": " - leaderboard_bbh_web_of_lies"
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2525167785234899,
"acc_norm_stderr,none": 0.012593416400106734,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"acc_norm,none": 0.2828282828282828,
"acc_norm_stderr,none": 0.032087795587867514,
"alias": " - leaderboard_gpqa_diamond"
},
"leaderboard_gpqa_extended": {
"acc_norm,none": 0.24725274725274726,
"acc_norm_stderr,none": 0.018479784453482837,
"alias": " - leaderboard_gpqa_extended"
},
"leaderboard_gpqa_main": {
"acc_norm,none": 0.24553571428571427,
"acc_norm_stderr,none": 0.020357428454484603,
"alias": " - leaderboard_gpqa_main"
},
"leaderboard_ifeval": {
"prompt_level_strict_acc,none": 0.12384473197781885,
"prompt_level_strict_acc_stderr,none": 0.014175305492766679,
"inst_level_strict_acc,none": 0.2446043165467626,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.12754158964879853,
"prompt_level_loose_acc_stderr,none": 0.014354940597336746,
"inst_level_loose_acc,none": 0.24820143884892087,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": " - leaderboard_ifeval"
},
"leaderboard_math_hard": {
"exact_match,none": 0.005287009063444109,
"exact_match_stderr,none": 0.001996953981139152,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"exact_match,none": 0.006514657980456026,
"exact_match_stderr,none": 0.004599025618546258,
"alias": " - leaderboard_math_algebra_hard"
},
"leaderboard_math_counting_and_prob_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_counting_and_prob_hard"
},
"leaderboard_math_geometry_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_geometry_hard"
},
"leaderboard_math_intermediate_algebra_hard": {
"exact_match,none": 0.007142857142857143,
"exact_match_stderr,none": 0.005041703051390571,
"alias": " - leaderboard_math_intermediate_algebra_hard"
},
"leaderboard_math_num_theory_hard": {
"exact_match,none": 0.006493506493506494,
"exact_match_stderr,none": 0.006493506493506494,
"alias": " - leaderboard_math_num_theory_hard"
},
"leaderboard_math_prealgebra_hard": {
"exact_match,none": 0.0051813471502590676,
"exact_match_stderr,none": 0.0051813471502590676,
"alias": " - leaderboard_math_prealgebra_hard"
},
"leaderboard_math_precalculus_hard": {
"exact_match,none": 0.007407407407407408,
"exact_match_stderr,none": 0.007407407407407408,
"alias": " - leaderboard_math_precalculus_hard"
},
"leaderboard_mmlu_pro": {
"acc,none": 0.11685505319148937,
"acc_stderr,none": 0.002928794359554319,
"alias": " - leaderboard_mmlu_pro"
},
"leaderboard_musr": {
"acc_norm,none": 0.376984126984127,
"acc_norm_stderr,none": 0.017204281211768292,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"acc_norm,none": 0.52,
"acc_norm_stderr,none": 0.03166085340849512,
"alias": " - leaderboard_musr_murder_mysteries"
},
"leaderboard_musr_object_placements": {
"acc_norm,none": 0.25390625,
"acc_norm_stderr,none": 0.027256074423586178,
"alias": " - leaderboard_musr_object_placements"
},
"leaderboard_musr_team_allocation": {
"acc_norm,none": 0.36,
"acc_norm_stderr,none": 0.03041876402517494,
"alias": " - leaderboard_musr_team_allocation"
}
},
"leaderboard": {
"acc_norm,none": 0.2956284861849786,
"acc_norm_stderr,none": 0.004947686335151244,
"inst_level_loose_acc,none": 0.2482014388489209,
"inst_level_loose_acc_stderr,none": "N/A",
"exact_match,none": 0.005287009063444109,
"exact_match_stderr,none": 0.001996953981139152,
"prompt_level_loose_acc,none": 0.12754158964879853,
"prompt_level_loose_acc_stderr,none": 0.014354940597336746,
"prompt_level_strict_acc,none": 0.12384473197781885,
"prompt_level_strict_acc_stderr,none": 0.014175305492766679,
"inst_level_strict_acc,none": 0.2446043165467626,
"inst_level_strict_acc_stderr,none": "N/A",
"acc,none": 0.11685505319148937,
"acc_stderr,none": 0.0029287943595543192,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.2938725915639646,
"acc_norm_stderr,none": 0.005652221363141021,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"acc_norm,none": 0.46,
"acc_norm_stderr,none": 0.031584653891499004,
"alias": " - leaderboard_bbh_boolean_expressions"
},
"leaderboard_bbh_causal_judgement": {
"acc_norm,none": 0.5187165775401069,
"acc_norm_stderr,none": 0.03663608375537843,
"alias": " - leaderboard_bbh_causal_judgement"
},
"leaderboard_bbh_date_understanding": {
"acc_norm,none": 0.2,
"acc_norm_stderr,none": 0.02534897002097912,
"alias": " - leaderboard_bbh_date_understanding"
},
"leaderboard_bbh_disambiguation_qa": {
"acc_norm,none": 0.316,
"acc_norm_stderr,none": 0.029462657598578648,
"alias": " - leaderboard_bbh_disambiguation_qa"
},
"leaderboard_bbh_formal_fallacies": {
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725574,
"alias": " - leaderboard_bbh_formal_fallacies"
},
"leaderboard_bbh_geometric_shapes": {
"acc_norm,none": 0.088,
"acc_norm_stderr,none": 0.017953084777052892,
"alias": " - leaderboard_bbh_geometric_shapes"
},
"leaderboard_bbh_hyperbaton": {
"acc_norm,none": 0.516,
"acc_norm_stderr,none": 0.03166998503010743,
"alias": " - leaderboard_bbh_hyperbaton"
},
"leaderboard_bbh_logical_deduction_five_objects": {
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548162,
"alias": " - leaderboard_bbh_logical_deduction_five_objects"
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240245,
"alias": " - leaderboard_bbh_logical_deduction_seven_objects"
},
"leaderboard_bbh_logical_deduction_three_objects": {
"acc_norm,none": 0.328,
"acc_norm_stderr,none": 0.029752391824475363,
"alias": " - leaderboard_bbh_logical_deduction_three_objects"
},
"leaderboard_bbh_movie_recommendation": {
"acc_norm,none": 0.28,
"acc_norm_stderr,none": 0.02845414827783231,
"alias": " - leaderboard_bbh_movie_recommendation"
},
"leaderboard_bbh_navigate": {
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661,
"alias": " - leaderboard_bbh_navigate"
},
"leaderboard_bbh_object_counting": {
"acc_norm,none": 0.0,
"acc_norm_stderr,none": 0.0,
"alias": " - leaderboard_bbh_object_counting"
},
"leaderboard_bbh_penguins_in_a_table": {
"acc_norm,none": 0.19863013698630136,
"acc_norm_stderr,none": 0.03313256608889815,
"alias": " - leaderboard_bbh_penguins_in_a_table"
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"acc_norm,none": 0.128,
"acc_norm_stderr,none": 0.021172081336336534,
"alias": " - leaderboard_bbh_reasoning_about_colored_objects"
},
"leaderboard_bbh_ruin_names": {
"acc_norm,none": 0.236,
"acc_norm_stderr,none": 0.026909337594953852,
"alias": " - leaderboard_bbh_ruin_names"
},
"leaderboard_bbh_salient_translation_error_detection": {
"acc_norm,none": 0.12,
"acc_norm_stderr,none": 0.020593600596839998,
"alias": " - leaderboard_bbh_salient_translation_error_detection"
},
"leaderboard_bbh_snarks": {
"acc_norm,none": 0.5280898876404494,
"acc_norm_stderr,none": 0.03752294651708463,
"alias": " - leaderboard_bbh_snarks"
},
"leaderboard_bbh_sports_understanding": {
"acc_norm,none": 0.46,
"acc_norm_stderr,none": 0.031584653891499004,
"alias": " - leaderboard_bbh_sports_understanding"
},
"leaderboard_bbh_temporal_sequences": {
"acc_norm,none": 0.284,
"acc_norm_stderr,none": 0.02857695873043744,
"alias": " - leaderboard_bbh_temporal_sequences"
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"acc_norm,none": 0.208,
"acc_norm_stderr,none": 0.02572139890141637,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465857,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects"
},
"leaderboard_bbh_web_of_lies": {
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254714,
"alias": " - leaderboard_bbh_web_of_lies"
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2525167785234899,
"acc_norm_stderr,none": 0.012593416400106734,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"acc_norm,none": 0.2828282828282828,
"acc_norm_stderr,none": 0.032087795587867514,
"alias": " - leaderboard_gpqa_diamond"
},
"leaderboard_gpqa_extended": {
"acc_norm,none": 0.24725274725274726,
"acc_norm_stderr,none": 0.018479784453482837,
"alias": " - leaderboard_gpqa_extended"
},
"leaderboard_gpqa_main": {
"acc_norm,none": 0.24553571428571427,
"acc_norm_stderr,none": 0.020357428454484603,
"alias": " - leaderboard_gpqa_main"
},
"leaderboard_ifeval": {
"prompt_level_strict_acc,none": 0.12384473197781885,
"prompt_level_strict_acc_stderr,none": 0.014175305492766679,
"inst_level_strict_acc,none": 0.2446043165467626,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.12754158964879853,
"prompt_level_loose_acc_stderr,none": 0.014354940597336746,
"inst_level_loose_acc,none": 0.24820143884892087,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": " - leaderboard_ifeval"
},
"leaderboard_math_hard": {
"exact_match,none": 0.005287009063444109,
"exact_match_stderr,none": 0.001996953981139152,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"exact_match,none": 0.006514657980456026,
"exact_match_stderr,none": 0.004599025618546258,
"alias": " - leaderboard_math_algebra_hard"
},
"leaderboard_math_counting_and_prob_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_counting_and_prob_hard"
},
"leaderboard_math_geometry_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_geometry_hard"
},
"leaderboard_math_intermediate_algebra_hard": {
"exact_match,none": 0.007142857142857143,
"exact_match_stderr,none": 0.005041703051390571,
"alias": " - leaderboard_math_intermediate_algebra_hard"
},
"leaderboard_math_num_theory_hard": {
"exact_match,none": 0.006493506493506494,
"exact_match_stderr,none": 0.006493506493506494,
"alias": " - leaderboard_math_num_theory_hard"
},
"leaderboard_math_prealgebra_hard": {
"exact_match,none": 0.0051813471502590676,
"exact_match_stderr,none": 0.0051813471502590676,
"alias": " - leaderboard_math_prealgebra_hard"
},
"leaderboard_math_precalculus_hard": {
"exact_match,none": 0.007407407407407408,
"exact_match_stderr,none": 0.007407407407407408,
"alias": " - leaderboard_math_precalculus_hard"
},
"leaderboard_mmlu_pro": {
"acc,none": 0.11685505319148937,
"acc_stderr,none": 0.002928794359554319,
"alias": " - leaderboard_mmlu_pro"
},
"leaderboard_musr": {
"acc_norm,none": 0.376984126984127,
"acc_norm_stderr,none": 0.017204281211768292,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"acc_norm,none": 0.52,
"acc_norm_stderr,none": 0.03166085340849512,
"alias": " - leaderboard_musr_murder_mysteries"
},
"leaderboard_musr_object_placements": {
"acc_norm,none": 0.25390625,
"acc_norm_stderr,none": 0.027256074423586178,
"alias": " - leaderboard_musr_object_placements"
},
"leaderboard_musr_team_allocation": {
"acc_norm,none": 0.36,
"acc_norm_stderr,none": 0.03041876402517494,
"alias": " - leaderboard_musr_team_allocation"
}
}
```
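A hedged sketch of extracting the aggregated metrics above — it assumes the linked results JSON has been saved locally, and that the real file keeps the layout shown (adjust the lookup if it nests these entries under extra keys):
```python
# Sketch: pull per-subtask BBH acc_norm scores out of the payload above.
import json

with open("results_2024-09-29T18-43-23.577285.json") as f:  # assumed local copy
    payload = json.load(f)

subtasks = payload.get("all", payload)  # fall back to the top level if "all" is absent
bbh = {
    name: metrics["acc_norm,none"]
    for name, metrics in subtasks.items()
    if name.startswith("leaderboard_bbh_")
}
best = max(bbh, key=bbh.get)
print(best, bbh[best])
```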
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases, and limitations of the dataset. More information is needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
ZixuanKe/cfa_unsup | ZixuanKe | "2024-09-30T00:21:59Z" | 0 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T18:44:03Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: topic
dtype: string
- name: title
dtype: string
- name: llama3_input_ids
sequence: int64
- name: llama3_attention_mask
sequence: int64
- name: llama3_special_tokens_mask
sequence: int64
- name: subset
dtype: int64
splits:
- name: train
num_bytes: 104623465
num_examples: 484
download_size: 14227248
dataset_size: 104623465
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
open-llm-leaderboard/nlpguy__Mistral-NeMo-Minitron-Upscale-v2-details | open-llm-leaderboard | "2024-09-29T18:49:01Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T18:45:44Z" | ---
pretty_name: Evaluation run of nlpguy/Mistral-NeMo-Minitron-Upscale-v2
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [nlpguy/Mistral-NeMo-Minitron-Upscale-v2](https://huggingface.co/nlpguy/Mistral-NeMo-Minitron-Upscale-v2)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run. The \"train\" split always points to the latest\
\ results.\n\nAn additional configuration \"results\" stores all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/nlpguy__Mistral-NeMo-Minitron-Upscale-v2-details\"\
,\n\tname=\"nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-09-29T18-45-43.672061](https://huggingface.co/datasets/open-llm-leaderboard/nlpguy__Mistral-NeMo-Minitron-Upscale-v2-details/blob/main/nlpguy__Mistral-NeMo-Minitron-Upscale-v2/results_2024-09-29T18-45-43.672061.json)\
\ (note that there might be results for other tasks in the repo if successive evals\
\ didn't cover the same tasks; you can find each one in the results and in the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"exact_match,none\": 0.005287009063444109,\n \"exact_match_stderr,none\"\
: 0.001988736669312895,\n \"prompt_level_loose_acc,none\": 0.11829944547134935,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.013898087176706587,\n \
\ \"acc_norm,none\": 0.37255156310805554,\n \"acc_norm_stderr,none\"\
: 0.005279122195828486,\n \"inst_level_strict_acc,none\": 0.21103117505995203,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_strict_acc,none\"\
: 0.10351201478743069,\n \"prompt_level_strict_acc_stderr,none\": 0.01310903544648431,\n\
\ \"inst_level_loose_acc,none\": 0.22422062350119903,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\",\n \"acc,none\": 0.1926529255319149,\n\
\ \"acc_stderr,none\": 0.0035955644575309613,\n \"alias\"\
: \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\"\
: 0.3922930046866863,\n \"acc_norm_stderr,none\": 0.006134934599176392,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"acc_norm,none\": 0.64,\n \"acc_norm_stderr,none\"\
: 0.030418764025174985,\n \"alias\": \" - leaderboard_bbh_boolean_expressions\"\
\n },\n \"leaderboard_bbh_causal_judgement\": {\n \"acc_norm,none\"\
: 0.5187165775401069,\n \"acc_norm_stderr,none\": 0.03663608375537842,\n\
\ \"alias\": \" - leaderboard_bbh_causal_judgement\"\n },\n \
\ \"leaderboard_bbh_date_understanding\": {\n \"acc_norm,none\"\
: 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254709,\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\"\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"acc_norm,none\": 0.508,\n\
\ \"acc_norm_stderr,none\": 0.031682156431413803,\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\"\n },\n \"leaderboard_bbh_formal_fallacies\"\
: {\n \"acc_norm,none\": 0.56,\n \"acc_norm_stderr,none\"\
: 0.03145724452223564,\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
\n },\n \"leaderboard_bbh_geometric_shapes\": {\n \"acc_norm,none\"\
: 0.28,\n \"acc_norm_stderr,none\": 0.028454148277832318,\n \
\ \"alias\": \" - leaderboard_bbh_geometric_shapes\"\n },\n \"\
leaderboard_bbh_hyperbaton\": {\n \"acc_norm,none\": 0.592,\n \
\ \"acc_norm_stderr,none\": 0.03114520984654851,\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\"\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"acc_norm,none\": 0.232,\n \"acc_norm_stderr,none\"\
: 0.026750070374865164,\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \
\ \"acc_norm,none\": 0.156,\n \"acc_norm_stderr,none\": 0.022995023034068748,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"acc_norm,none\": 0.404,\n \"acc_norm_stderr,none\": 0.031096688184825295,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
acc_norm,none\": 0.56,\n \"acc_norm_stderr,none\": 0.03145724452223564,\n\
\ \"alias\": \" - leaderboard_bbh_movie_recommendation\"\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"acc_norm,none\": 0.42,\n\
\ \"acc_norm_stderr,none\": 0.03127799950463661,\n \"alias\"\
: \" - leaderboard_bbh_navigate\"\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"acc_norm,none\": 0.364,\n \"acc_norm_stderr,none\"\
: 0.03049155522040556,\n \"alias\": \" - leaderboard_bbh_object_counting\"\
\n },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
acc_norm,none\": 0.410958904109589,\n \"acc_norm_stderr,none\": 0.04085902451640227,\n\
\ \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\n },\n\
\ \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \"\
acc_norm,none\": 0.272,\n \"acc_norm_stderr,none\": 0.02820008829630999,\n\
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"acc_norm,none\"\
: 0.308,\n \"acc_norm_stderr,none\": 0.029256928606501864,\n \
\ \"alias\": \" - leaderboard_bbh_ruin_names\"\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"acc_norm,none\": 0.22,\n \"acc_norm_stderr,none\"\
: 0.026251792824605845,\n \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\
\n },\n \"leaderboard_bbh_snarks\": {\n \"acc_norm,none\"\
: 0.47752808988764045,\n \"acc_norm_stderr,none\": 0.03754432508487193,\n\
\ \"alias\": \" - leaderboard_bbh_snarks\"\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"acc_norm,none\": 0.624,\n \"acc_norm_stderr,none\"\
: 0.03069633626739458,\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
\n },\n \"leaderboard_bbh_temporal_sequences\": {\n \"\
acc_norm,none\": 0.26,\n \"acc_norm_stderr,none\": 0.02779731575264431,\n\
\ \"alias\": \" - leaderboard_bbh_temporal_sequences\"\n },\n\
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"acc_norm,none\": 0.204,\n \"acc_norm_stderr,none\": 0.025537121574548148,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"acc_norm,none\": 0.16,\n \"acc_norm_stderr,none\"\
: 0.023232714782060654,\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\"\
: 0.029844039047465902,\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
\n },\n \"leaderboard_bbh_web_of_lies\": {\n \"acc_norm,none\"\
: 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254709,\n \
\ \"alias\": \" - leaderboard_bbh_web_of_lies\"\n },\n \"leaderboard_gpqa\"\
: {\n \"acc_norm,none\": 0.27348993288590606,\n \"acc_norm_stderr,none\"\
: 0.012921553908220343,\n \"alias\": \" - leaderboard_gpqa\"\n \
\ },\n \"leaderboard_gpqa_diamond\": {\n \"acc_norm,none\": 0.2828282828282828,\n\
\ \"acc_norm_stderr,none\": 0.032087795587867514,\n \"alias\"\
: \" - leaderboard_gpqa_diamond\"\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"acc_norm,none\": 0.2838827838827839,\n \"acc_norm_stderr,none\"\
: 0.019313604507663257,\n \"alias\": \" - leaderboard_gpqa_extended\"\
\n },\n \"leaderboard_gpqa_main\": {\n \"acc_norm,none\"\
: 0.25669642857142855,\n \"acc_norm_stderr,none\": 0.0206604254917247,\n\
\ \"alias\": \" - leaderboard_gpqa_main\"\n },\n \"leaderboard_ifeval\"\
: {\n \"prompt_level_strict_acc,none\": 0.10351201478743069,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.01310903544648431,\n \
\ \"inst_level_strict_acc,none\": 0.21103117505995203,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.11829944547134935,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.013898087176706587,\n \
\ \"inst_level_loose_acc,none\": 0.22422062350119903,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"alias\": \" - leaderboard_ifeval\"\n },\n \
\ \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.005287009063444109,\n\
\ \"exact_match_stderr,none\": 0.001988736669312895,\n \"\
alias\": \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\"\
: {\n \"exact_match,none\": 0.006514657980456026,\n \"exact_match_stderr,none\"\
: 0.004599025618546257,\n \"alias\": \" - leaderboard_math_algebra_hard\"\
\n },\n \"leaderboard_math_counting_and_prob_hard\": {\n \
\ \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0,\n\
\ \"alias\": \" - leaderboard_math_counting_and_prob_hard\"\n \
\ },\n \"leaderboard_math_geometry_hard\": {\n \"exact_match,none\"\
: 0.007575757575757576,\n \"exact_match_stderr,none\": 0.007575757575757562,\n\
\ \"alias\": \" - leaderboard_math_geometry_hard\"\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0,\n \"alias\": \"\
\ - leaderboard_math_intermediate_algebra_hard\"\n },\n \"leaderboard_math_num_theory_hard\"\
: {\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0,\n \"alias\": \" - leaderboard_math_num_theory_hard\"\n \
\ },\n \"leaderboard_math_prealgebra_hard\": {\n \"exact_match,none\"\
: 0.02072538860103627,\n \"exact_match_stderr,none\": 0.010281417011909055,\n\
\ \"alias\": \" - leaderboard_math_prealgebra_hard\"\n },\n \
\ \"leaderboard_math_precalculus_hard\": {\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0,\n \"alias\": \"\
\ - leaderboard_math_precalculus_hard\"\n },\n \"leaderboard_mmlu_pro\"\
: {\n \"acc,none\": 0.1926529255319149,\n \"acc_stderr,none\"\
: 0.0035955644575309613,\n \"alias\": \" - leaderboard_mmlu_pro\"\n \
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.3783068783068783,\n\
\ \"acc_norm_stderr,none\": 0.01723804003480842,\n \"alias\"\
: \" - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\"\
: {\n \"acc_norm,none\": 0.528,\n \"acc_norm_stderr,none\"\
: 0.031636489531544396,\n \"alias\": \" - leaderboard_musr_murder_mysteries\"\
\n },\n \"leaderboard_musr_object_placements\": {\n \"\
acc_norm,none\": 0.28125,\n \"acc_norm_stderr,none\": 0.028155620586096754,\n\
\ \"alias\": \" - leaderboard_musr_object_placements\"\n },\n\
\ \"leaderboard_musr_team_allocation\": {\n \"acc_norm,none\"\
: 0.328,\n \"acc_norm_stderr,none\": 0.029752391824475373,\n \
\ \"alias\": \" - leaderboard_musr_team_allocation\"\n }\n },\n \
\ \"leaderboard\": {\n \"exact_match,none\": 0.005287009063444109,\n \
\ \"exact_match_stderr,none\": 0.001988736669312895,\n \"prompt_level_loose_acc,none\"\
: 0.11829944547134935,\n \"prompt_level_loose_acc_stderr,none\": 0.013898087176706587,\n\
\ \"acc_norm,none\": 0.37255156310805554,\n \"acc_norm_stderr,none\"\
: 0.005279122195828486,\n \"inst_level_strict_acc,none\": 0.21103117505995203,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_strict_acc,none\"\
: 0.10351201478743069,\n \"prompt_level_strict_acc_stderr,none\": 0.01310903544648431,\n\
\ \"inst_level_loose_acc,none\": 0.22422062350119903,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"acc,none\": 0.1926529255319149,\n \"acc_stderr,none\"\
: 0.0035955644575309613,\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.3922930046866863,\n \"acc_norm_stderr,none\"\
: 0.006134934599176392,\n \"alias\": \" - leaderboard_bbh\"\n },\n \
\ \"leaderboard_bbh_boolean_expressions\": {\n \"acc_norm,none\": 0.64,\n\
\ \"acc_norm_stderr,none\": 0.030418764025174985,\n \"alias\": \"\
\ - leaderboard_bbh_boolean_expressions\"\n },\n \"leaderboard_bbh_causal_judgement\"\
: {\n \"acc_norm,none\": 0.5187165775401069,\n \"acc_norm_stderr,none\"\
: 0.03663608375537842,\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
\n },\n \"leaderboard_bbh_date_understanding\": {\n \"acc_norm,none\"\
: 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254709,\n \"alias\"\
: \" - leaderboard_bbh_date_understanding\"\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"acc_norm,none\": 0.508,\n \"acc_norm_stderr,none\": 0.031682156431413803,\n\
\ \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\n },\n \"leaderboard_bbh_formal_fallacies\"\
: {\n \"acc_norm,none\": 0.56,\n \"acc_norm_stderr,none\": 0.03145724452223564,\n\
\ \"alias\": \" - leaderboard_bbh_formal_fallacies\"\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"acc_norm,none\": 0.28,\n \"acc_norm_stderr,none\": 0.028454148277832318,\n\
\ \"alias\": \" - leaderboard_bbh_geometric_shapes\"\n },\n \"leaderboard_bbh_hyperbaton\"\
: {\n \"acc_norm,none\": 0.592,\n \"acc_norm_stderr,none\": 0.03114520984654851,\n\
\ \"alias\": \" - leaderboard_bbh_hyperbaton\"\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"acc_norm,none\": 0.232,\n \"acc_norm_stderr,none\": 0.026750070374865164,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\n \
\ },\n \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"acc_norm,none\"\
: 0.156,\n \"acc_norm_stderr,none\": 0.022995023034068748,\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_seven_objects\"\n },\n \"leaderboard_bbh_logical_deduction_three_objects\"\
: {\n \"acc_norm,none\": 0.404,\n \"acc_norm_stderr,none\": 0.031096688184825295,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\n \
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"acc_norm,none\"\
: 0.56,\n \"acc_norm_stderr,none\": 0.03145724452223564,\n \"alias\"\
: \" - leaderboard_bbh_movie_recommendation\"\n },\n \"leaderboard_bbh_navigate\"\
: {\n \"acc_norm,none\": 0.42,\n \"acc_norm_stderr,none\": 0.03127799950463661,\n\
\ \"alias\": \" - leaderboard_bbh_navigate\"\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"acc_norm,none\": 0.364,\n \"acc_norm_stderr,none\": 0.03049155522040556,\n\
\ \"alias\": \" - leaderboard_bbh_object_counting\"\n },\n \"leaderboard_bbh_penguins_in_a_table\"\
: {\n \"acc_norm,none\": 0.410958904109589,\n \"acc_norm_stderr,none\"\
: 0.04085902451640227,\n \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\
\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \"\
acc_norm,none\": 0.272,\n \"acc_norm_stderr,none\": 0.02820008829630999,\n\
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\n \
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"acc_norm,none\": 0.308,\n\
\ \"acc_norm_stderr,none\": 0.029256928606501864,\n \"alias\": \"\
\ - leaderboard_bbh_ruin_names\"\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"acc_norm,none\": 0.22,\n \"acc_norm_stderr,none\": 0.026251792824605845,\n\
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"acc_norm,none\": 0.47752808988764045,\n\
\ \"acc_norm_stderr,none\": 0.03754432508487193,\n \"alias\": \" \
\ - leaderboard_bbh_snarks\"\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"acc_norm,none\": 0.624,\n \"acc_norm_stderr,none\": 0.03069633626739458,\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\"\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"acc_norm,none\": 0.26,\n \
\ \"acc_norm_stderr,none\": 0.02779731575264431,\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"acc_norm,none\": 0.204,\n \"acc_norm_stderr,none\": 0.025537121574548148,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"acc_norm,none\": 0.16,\n \"acc_norm_stderr,none\": 0.023232714782060654,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\": 0.029844039047465902,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
\n },\n \"leaderboard_bbh_web_of_lies\": {\n \"acc_norm,none\": 0.488,\n\
\ \"acc_norm_stderr,none\": 0.03167708558254709,\n \"alias\": \" \
\ - leaderboard_bbh_web_of_lies\"\n },\n \"leaderboard_gpqa\": {\n \
\ \"acc_norm,none\": 0.27348993288590606,\n \"acc_norm_stderr,none\": 0.012921553908220343,\n\
\ \"alias\": \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\"\
: {\n \"acc_norm,none\": 0.2828282828282828,\n \"acc_norm_stderr,none\"\
: 0.032087795587867514,\n \"alias\": \" - leaderboard_gpqa_diamond\"\n \
\ },\n \"leaderboard_gpqa_extended\": {\n \"acc_norm,none\": 0.2838827838827839,\n\
\ \"acc_norm_stderr,none\": 0.019313604507663257,\n \"alias\": \"\
\ - leaderboard_gpqa_extended\"\n },\n \"leaderboard_gpqa_main\": {\n \
\ \"acc_norm,none\": 0.25669642857142855,\n \"acc_norm_stderr,none\"\
: 0.0206604254917247,\n \"alias\": \" - leaderboard_gpqa_main\"\n },\n\
\ \"leaderboard_ifeval\": {\n \"prompt_level_strict_acc,none\": 0.10351201478743069,\n\
\ \"prompt_level_strict_acc_stderr,none\": 0.01310903544648431,\n \
\ \"inst_level_strict_acc,none\": 0.21103117505995203,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.11829944547134935,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.013898087176706587,\n \"inst_level_loose_acc,none\"\
: 0.22422062350119903,\n \"inst_level_loose_acc_stderr,none\": \"N/A\",\n\
\ \"alias\": \" - leaderboard_ifeval\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.005287009063444109,\n \"exact_match_stderr,none\"\
: 0.001988736669312895,\n \"alias\": \" - leaderboard_math_hard\"\n },\n\
\ \"leaderboard_math_algebra_hard\": {\n \"exact_match,none\": 0.006514657980456026,\n\
\ \"exact_match_stderr,none\": 0.004599025618546257,\n \"alias\":\
\ \" - leaderboard_math_algebra_hard\"\n },\n \"leaderboard_math_counting_and_prob_hard\"\
: {\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0,\n\
\ \"alias\": \" - leaderboard_math_counting_and_prob_hard\"\n },\n \
\ \"leaderboard_math_geometry_hard\": {\n \"exact_match,none\": 0.007575757575757576,\n\
\ \"exact_match_stderr,none\": 0.007575757575757562,\n \"alias\":\
\ \" - leaderboard_math_geometry_hard\"\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0,\n\
\ \"alias\": \" - leaderboard_math_intermediate_algebra_hard\"\n },\n\
\ \"leaderboard_math_num_theory_hard\": {\n \"exact_match,none\": 0.0,\n\
\ \"exact_match_stderr,none\": 0.0,\n \"alias\": \" - leaderboard_math_num_theory_hard\"\
\n },\n \"leaderboard_math_prealgebra_hard\": {\n \"exact_match,none\"\
: 0.02072538860103627,\n \"exact_match_stderr,none\": 0.010281417011909055,\n\
\ \"alias\": \" - leaderboard_math_prealgebra_hard\"\n },\n \"leaderboard_math_precalculus_hard\"\
: {\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0,\n\
\ \"alias\": \" - leaderboard_math_precalculus_hard\"\n },\n \"leaderboard_mmlu_pro\"\
: {\n \"acc,none\": 0.1926529255319149,\n \"acc_stderr,none\": 0.0035955644575309613,\n\
\ \"alias\": \" - leaderboard_mmlu_pro\"\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.3783068783068783,\n \"acc_norm_stderr,none\"\
: 0.01723804003480842,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"acc_norm,none\": 0.528,\n\
\ \"acc_norm_stderr,none\": 0.031636489531544396,\n \"alias\": \"\
\ - leaderboard_musr_murder_mysteries\"\n },\n \"leaderboard_musr_object_placements\"\
: {\n \"acc_norm,none\": 0.28125,\n \"acc_norm_stderr,none\": 0.028155620586096754,\n\
\ \"alias\": \" - leaderboard_musr_object_placements\"\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"acc_norm,none\": 0.328,\n \"acc_norm_stderr,none\": 0.029752391824475373,\n\
\ \"alias\": \" - leaderboard_musr_team_allocation\"\n }\n}\n```"
repo_url: https://huggingface.co/nlpguy/Mistral-NeMo-Minitron-Upscale-v2
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_date_understanding
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_navigate
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_navigate_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_object_counting
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_object_counting_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_ruin_names
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_snarks
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_snarks_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_gpqa_diamond
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_gpqa_diamond_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_gpqa_extended
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_gpqa_extended_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_gpqa_main
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_gpqa_main_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_ifeval
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_ifeval_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_math_algebra_hard
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_math_algebra_hard_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_math_geometry_hard
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_math_geometry_hard_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_math_num_theory_hard
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_math_precalculus_hard
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_mmlu_pro
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_mmlu_pro_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_musr_object_placements
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_musr_object_placements_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-09-29T18-45-43.672061.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_musr_team_allocation
data_files:
- split: 2024_09_29T18_45_43.672061
path:
- '**/samples_leaderboard_musr_team_allocation_2024-09-29T18-45-43.672061.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-09-29T18-45-43.672061.jsonl'
---
# Dataset Card for Evaluation run of nlpguy/Mistral-NeMo-Minitron-Upscale-v2
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [nlpguy/Mistral-NeMo-Minitron-Upscale-v2](https://huggingface.co/nlpguy/Mistral-NeMo-Minitron-Upscale-v2)
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/nlpguy__Mistral-NeMo-Minitron-Upscale-v2-details",
name="nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
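Because each run is stored under a timestamp-named split (with "latest" pointing at the most recent one), it can help to list the available splits before loading. The following is a minimal sketch, assuming the `datasets` library is installed; the configuration name is one of the 38 listed in this card's metadata.
```python
from datasets import get_dataset_split_names, load_dataset

repo = "open-llm-leaderboard/nlpguy__Mistral-NeMo-Minitron-Upscale-v2-details"
config = "nlpguy__Mistral-NeMo-Minitron-Upscale-v2__leaderboard_bbh_boolean_expressions"

# Shows both the timestamped split(s) and the "latest" alias for this config.
print(get_dataset_split_names(repo, config_name=config))

# Load the most recent run and inspect one per-sample record.
samples = load_dataset(repo, name=config, split="latest")
print(samples[0])
```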
## Latest results
These are the [latest results from run 2024-09-29T18-45-43.672061](https://huggingface.co/datasets/open-llm-leaderboard/nlpguy__Mistral-NeMo-Minitron-Upscale-v2-details/blob/main/nlpguy__Mistral-NeMo-Minitron-Upscale-v2/results_2024-09-29T18-45-43.672061.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split for each eval):
```python
{
"all": {
"leaderboard": {
"exact_match,none": 0.005287009063444109,
"exact_match_stderr,none": 0.001988736669312895,
"prompt_level_loose_acc,none": 0.11829944547134935,
"prompt_level_loose_acc_stderr,none": 0.013898087176706587,
"acc_norm,none": 0.37255156310805554,
"acc_norm_stderr,none": 0.005279122195828486,
"inst_level_strict_acc,none": 0.21103117505995203,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.10351201478743069,
"prompt_level_strict_acc_stderr,none": 0.01310903544648431,
"inst_level_loose_acc,none": 0.22422062350119903,
"inst_level_loose_acc_stderr,none": "N/A",
"acc,none": 0.1926529255319149,
"acc_stderr,none": 0.0035955644575309613,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.3922930046866863,
"acc_norm_stderr,none": 0.006134934599176392,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"acc_norm,none": 0.64,
"acc_norm_stderr,none": 0.030418764025174985,
"alias": " - leaderboard_bbh_boolean_expressions"
},
"leaderboard_bbh_causal_judgement": {
"acc_norm,none": 0.5187165775401069,
"acc_norm_stderr,none": 0.03663608375537842,
"alias": " - leaderboard_bbh_causal_judgement"
},
"leaderboard_bbh_date_understanding": {
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254709,
"alias": " - leaderboard_bbh_date_understanding"
},
"leaderboard_bbh_disambiguation_qa": {
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.031682156431413803,
"alias": " - leaderboard_bbh_disambiguation_qa"
},
"leaderboard_bbh_formal_fallacies": {
"acc_norm,none": 0.56,
"acc_norm_stderr,none": 0.03145724452223564,
"alias": " - leaderboard_bbh_formal_fallacies"
},
"leaderboard_bbh_geometric_shapes": {
"acc_norm,none": 0.28,
"acc_norm_stderr,none": 0.028454148277832318,
"alias": " - leaderboard_bbh_geometric_shapes"
},
"leaderboard_bbh_hyperbaton": {
"acc_norm,none": 0.592,
"acc_norm_stderr,none": 0.03114520984654851,
"alias": " - leaderboard_bbh_hyperbaton"
},
"leaderboard_bbh_logical_deduction_five_objects": {
"acc_norm,none": 0.232,
"acc_norm_stderr,none": 0.026750070374865164,
"alias": " - leaderboard_bbh_logical_deduction_five_objects"
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"acc_norm,none": 0.156,
"acc_norm_stderr,none": 0.022995023034068748,
"alias": " - leaderboard_bbh_logical_deduction_seven_objects"
},
"leaderboard_bbh_logical_deduction_three_objects": {
"acc_norm,none": 0.404,
"acc_norm_stderr,none": 0.031096688184825295,
"alias": " - leaderboard_bbh_logical_deduction_three_objects"
},
"leaderboard_bbh_movie_recommendation": {
"acc_norm,none": 0.56,
"acc_norm_stderr,none": 0.03145724452223564,
"alias": " - leaderboard_bbh_movie_recommendation"
},
"leaderboard_bbh_navigate": {
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661,
"alias": " - leaderboard_bbh_navigate"
},
"leaderboard_bbh_object_counting": {
"acc_norm,none": 0.364,
"acc_norm_stderr,none": 0.03049155522040556,
"alias": " - leaderboard_bbh_object_counting"
},
"leaderboard_bbh_penguins_in_a_table": {
"acc_norm,none": 0.410958904109589,
"acc_norm_stderr,none": 0.04085902451640227,
"alias": " - leaderboard_bbh_penguins_in_a_table"
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"acc_norm,none": 0.272,
"acc_norm_stderr,none": 0.02820008829630999,
"alias": " - leaderboard_bbh_reasoning_about_colored_objects"
},
"leaderboard_bbh_ruin_names": {
"acc_norm,none": 0.308,
"acc_norm_stderr,none": 0.029256928606501864,
"alias": " - leaderboard_bbh_ruin_names"
},
"leaderboard_bbh_salient_translation_error_detection": {
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605845,
"alias": " - leaderboard_bbh_salient_translation_error_detection"
},
"leaderboard_bbh_snarks": {
"acc_norm,none": 0.47752808988764045,
"acc_norm_stderr,none": 0.03754432508487193,
"alias": " - leaderboard_bbh_snarks"
},
"leaderboard_bbh_sports_understanding": {
"acc_norm,none": 0.624,
"acc_norm_stderr,none": 0.03069633626739458,
"alias": " - leaderboard_bbh_sports_understanding"
},
"leaderboard_bbh_temporal_sequences": {
"acc_norm,none": 0.26,
"acc_norm_stderr,none": 0.02779731575264431,
"alias": " - leaderboard_bbh_temporal_sequences"
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548148,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"acc_norm,none": 0.16,
"acc_norm_stderr,none": 0.023232714782060654,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465902,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects"
},
"leaderboard_bbh_web_of_lies": {
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254709,
"alias": " - leaderboard_bbh_web_of_lies"
},
"leaderboard_gpqa": {
"acc_norm,none": 0.27348993288590606,
"acc_norm_stderr,none": 0.012921553908220343,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"acc_norm,none": 0.2828282828282828,
"acc_norm_stderr,none": 0.032087795587867514,
"alias": " - leaderboard_gpqa_diamond"
},
"leaderboard_gpqa_extended": {
"acc_norm,none": 0.2838827838827839,
"acc_norm_stderr,none": 0.019313604507663257,
"alias": " - leaderboard_gpqa_extended"
},
"leaderboard_gpqa_main": {
"acc_norm,none": 0.25669642857142855,
"acc_norm_stderr,none": 0.0206604254917247,
"alias": " - leaderboard_gpqa_main"
},
"leaderboard_ifeval": {
"prompt_level_strict_acc,none": 0.10351201478743069,
"prompt_level_strict_acc_stderr,none": 0.01310903544648431,
"inst_level_strict_acc,none": 0.21103117505995203,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.11829944547134935,
"prompt_level_loose_acc_stderr,none": 0.013898087176706587,
"inst_level_loose_acc,none": 0.22422062350119903,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": " - leaderboard_ifeval"
},
"leaderboard_math_hard": {
"exact_match,none": 0.005287009063444109,
"exact_match_stderr,none": 0.001988736669312895,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"exact_match,none": 0.006514657980456026,
"exact_match_stderr,none": 0.004599025618546257,
"alias": " - leaderboard_math_algebra_hard"
},
"leaderboard_math_counting_and_prob_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_counting_and_prob_hard"
},
"leaderboard_math_geometry_hard": {
"exact_match,none": 0.007575757575757576,
"exact_match_stderr,none": 0.007575757575757562,
"alias": " - leaderboard_math_geometry_hard"
},
"leaderboard_math_intermediate_algebra_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_intermediate_algebra_hard"
},
"leaderboard_math_num_theory_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_num_theory_hard"
},
"leaderboard_math_prealgebra_hard": {
"exact_match,none": 0.02072538860103627,
"exact_match_stderr,none": 0.010281417011909055,
"alias": " - leaderboard_math_prealgebra_hard"
},
"leaderboard_math_precalculus_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_precalculus_hard"
},
"leaderboard_mmlu_pro": {
"acc,none": 0.1926529255319149,
"acc_stderr,none": 0.0035955644575309613,
"alias": " - leaderboard_mmlu_pro"
},
"leaderboard_musr": {
"acc_norm,none": 0.3783068783068783,
"acc_norm_stderr,none": 0.01723804003480842,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"acc_norm,none": 0.528,
"acc_norm_stderr,none": 0.031636489531544396,
"alias": " - leaderboard_musr_murder_mysteries"
},
"leaderboard_musr_object_placements": {
"acc_norm,none": 0.28125,
"acc_norm_stderr,none": 0.028155620586096754,
"alias": " - leaderboard_musr_object_placements"
},
"leaderboard_musr_team_allocation": {
"acc_norm,none": 0.328,
"acc_norm_stderr,none": 0.029752391824475373,
"alias": " - leaderboard_musr_team_allocation"
}
},
"leaderboard": {
"exact_match,none": 0.005287009063444109,
"exact_match_stderr,none": 0.001988736669312895,
"prompt_level_loose_acc,none": 0.11829944547134935,
"prompt_level_loose_acc_stderr,none": 0.013898087176706587,
"acc_norm,none": 0.37255156310805554,
"acc_norm_stderr,none": 0.005279122195828486,
"inst_level_strict_acc,none": 0.21103117505995203,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.10351201478743069,
"prompt_level_strict_acc_stderr,none": 0.01310903544648431,
"inst_level_loose_acc,none": 0.22422062350119903,
"inst_level_loose_acc_stderr,none": "N/A",
"acc,none": 0.1926529255319149,
"acc_stderr,none": 0.0035955644575309613,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.3922930046866863,
"acc_norm_stderr,none": 0.006134934599176392,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"acc_norm,none": 0.64,
"acc_norm_stderr,none": 0.030418764025174985,
"alias": " - leaderboard_bbh_boolean_expressions"
},
"leaderboard_bbh_causal_judgement": {
"acc_norm,none": 0.5187165775401069,
"acc_norm_stderr,none": 0.03663608375537842,
"alias": " - leaderboard_bbh_causal_judgement"
},
"leaderboard_bbh_date_understanding": {
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254709,
"alias": " - leaderboard_bbh_date_understanding"
},
"leaderboard_bbh_disambiguation_qa": {
"acc_norm,none": 0.508,
"acc_norm_stderr,none": 0.031682156431413803,
"alias": " - leaderboard_bbh_disambiguation_qa"
},
"leaderboard_bbh_formal_fallacies": {
"acc_norm,none": 0.56,
"acc_norm_stderr,none": 0.03145724452223564,
"alias": " - leaderboard_bbh_formal_fallacies"
},
"leaderboard_bbh_geometric_shapes": {
"acc_norm,none": 0.28,
"acc_norm_stderr,none": 0.028454148277832318,
"alias": " - leaderboard_bbh_geometric_shapes"
},
"leaderboard_bbh_hyperbaton": {
"acc_norm,none": 0.592,
"acc_norm_stderr,none": 0.03114520984654851,
"alias": " - leaderboard_bbh_hyperbaton"
},
"leaderboard_bbh_logical_deduction_five_objects": {
"acc_norm,none": 0.232,
"acc_norm_stderr,none": 0.026750070374865164,
"alias": " - leaderboard_bbh_logical_deduction_five_objects"
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"acc_norm,none": 0.156,
"acc_norm_stderr,none": 0.022995023034068748,
"alias": " - leaderboard_bbh_logical_deduction_seven_objects"
},
"leaderboard_bbh_logical_deduction_three_objects": {
"acc_norm,none": 0.404,
"acc_norm_stderr,none": 0.031096688184825295,
"alias": " - leaderboard_bbh_logical_deduction_three_objects"
},
"leaderboard_bbh_movie_recommendation": {
"acc_norm,none": 0.56,
"acc_norm_stderr,none": 0.03145724452223564,
"alias": " - leaderboard_bbh_movie_recommendation"
},
"leaderboard_bbh_navigate": {
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661,
"alias": " - leaderboard_bbh_navigate"
},
"leaderboard_bbh_object_counting": {
"acc_norm,none": 0.364,
"acc_norm_stderr,none": 0.03049155522040556,
"alias": " - leaderboard_bbh_object_counting"
},
"leaderboard_bbh_penguins_in_a_table": {
"acc_norm,none": 0.410958904109589,
"acc_norm_stderr,none": 0.04085902451640227,
"alias": " - leaderboard_bbh_penguins_in_a_table"
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"acc_norm,none": 0.272,
"acc_norm_stderr,none": 0.02820008829630999,
"alias": " - leaderboard_bbh_reasoning_about_colored_objects"
},
"leaderboard_bbh_ruin_names": {
"acc_norm,none": 0.308,
"acc_norm_stderr,none": 0.029256928606501864,
"alias": " - leaderboard_bbh_ruin_names"
},
"leaderboard_bbh_salient_translation_error_detection": {
"acc_norm,none": 0.22,
"acc_norm_stderr,none": 0.026251792824605845,
"alias": " - leaderboard_bbh_salient_translation_error_detection"
},
"leaderboard_bbh_snarks": {
"acc_norm,none": 0.47752808988764045,
"acc_norm_stderr,none": 0.03754432508487193,
"alias": " - leaderboard_bbh_snarks"
},
"leaderboard_bbh_sports_understanding": {
"acc_norm,none": 0.624,
"acc_norm_stderr,none": 0.03069633626739458,
"alias": " - leaderboard_bbh_sports_understanding"
},
"leaderboard_bbh_temporal_sequences": {
"acc_norm,none": 0.26,
"acc_norm_stderr,none": 0.02779731575264431,
"alias": " - leaderboard_bbh_temporal_sequences"
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"acc_norm,none": 0.204,
"acc_norm_stderr,none": 0.025537121574548148,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"acc_norm,none": 0.16,
"acc_norm_stderr,none": 0.023232714782060654,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465902,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects"
},
"leaderboard_bbh_web_of_lies": {
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254709,
"alias": " - leaderboard_bbh_web_of_lies"
},
"leaderboard_gpqa": {
"acc_norm,none": 0.27348993288590606,
"acc_norm_stderr,none": 0.012921553908220343,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"acc_norm,none": 0.2828282828282828,
"acc_norm_stderr,none": 0.032087795587867514,
"alias": " - leaderboard_gpqa_diamond"
},
"leaderboard_gpqa_extended": {
"acc_norm,none": 0.2838827838827839,
"acc_norm_stderr,none": 0.019313604507663257,
"alias": " - leaderboard_gpqa_extended"
},
"leaderboard_gpqa_main": {
"acc_norm,none": 0.25669642857142855,
"acc_norm_stderr,none": 0.0206604254917247,
"alias": " - leaderboard_gpqa_main"
},
"leaderboard_ifeval": {
"prompt_level_strict_acc,none": 0.10351201478743069,
"prompt_level_strict_acc_stderr,none": 0.01310903544648431,
"inst_level_strict_acc,none": 0.21103117505995203,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.11829944547134935,
"prompt_level_loose_acc_stderr,none": 0.013898087176706587,
"inst_level_loose_acc,none": 0.22422062350119903,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": " - leaderboard_ifeval"
},
"leaderboard_math_hard": {
"exact_match,none": 0.005287009063444109,
"exact_match_stderr,none": 0.001988736669312895,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"exact_match,none": 0.006514657980456026,
"exact_match_stderr,none": 0.004599025618546257,
"alias": " - leaderboard_math_algebra_hard"
},
"leaderboard_math_counting_and_prob_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_counting_and_prob_hard"
},
"leaderboard_math_geometry_hard": {
"exact_match,none": 0.007575757575757576,
"exact_match_stderr,none": 0.007575757575757562,
"alias": " - leaderboard_math_geometry_hard"
},
"leaderboard_math_intermediate_algebra_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_intermediate_algebra_hard"
},
"leaderboard_math_num_theory_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_num_theory_hard"
},
"leaderboard_math_prealgebra_hard": {
"exact_match,none": 0.02072538860103627,
"exact_match_stderr,none": 0.010281417011909055,
"alias": " - leaderboard_math_prealgebra_hard"
},
"leaderboard_math_precalculus_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_precalculus_hard"
},
"leaderboard_mmlu_pro": {
"acc,none": 0.1926529255319149,
"acc_stderr,none": 0.0035955644575309613,
"alias": " - leaderboard_mmlu_pro"
},
"leaderboard_musr": {
"acc_norm,none": 0.3783068783068783,
"acc_norm_stderr,none": 0.01723804003480842,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"acc_norm,none": 0.528,
"acc_norm_stderr,none": 0.031636489531544396,
"alias": " - leaderboard_musr_murder_mysteries"
},
"leaderboard_musr_object_placements": {
"acc_norm,none": 0.28125,
"acc_norm_stderr,none": 0.028155620586096754,
"alias": " - leaderboard_musr_object_placements"
},
"leaderboard_musr_team_allocation": {
"acc_norm,none": 0.328,
"acc_norm_stderr,none": 0.029752391824475373,
"alias": " - leaderboard_musr_team_allocation"
}
}
```
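The aggregated numbers above are also stored in the results JSON file linked at the start of this section. Below is a minimal sketch of fetching that file directly, assuming `huggingface_hub` is installed and using the file path from the link above; the `"all"` key mirrors the structure printed above and may need adjusting if the stored file wraps results differently.
```python
import json
from huggingface_hub import hf_hub_download

# Download the aggregated results file referenced above; repo_type must be
# "dataset" because the evaluation details live in a dataset repository.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/nlpguy__Mistral-NeMo-Minitron-Upscale-v2-details",
    filename="nlpguy__Mistral-NeMo-Minitron-Upscale-v2/results_2024-09-29T18-45-43.672061.json",
    repo_type="dataset",
)

with open(path) as f:
    payload = json.load(f)

# Print one headline metric per task where normalized accuracy is reported.
for task, metrics in payload.get("all", payload).items():
    value = metrics.get("acc_norm,none") if isinstance(metrics, dict) else None
    if value is not None:
        print(f"{task}: acc_norm = {value}")
```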
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/nlpguy__Mistral-NeMo-Minitron-Upscale-v1-details | open-llm-leaderboard | "2024-09-29T18:49:09Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T18:46:07Z" | ---
pretty_name: Evaluation run of nlpguy/Mistral-NeMo-Minitron-Upscale-v1
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [nlpguy/Mistral-NeMo-Minitron-Upscale-v1](https://huggingface.co/nlpguy/Mistral-NeMo-Minitron-Upscale-v1)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/nlpguy__Mistral-NeMo-Minitron-Upscale-v1-details\"\
,\n\tname=\"nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-09-29T18-46-06.825022](https://huggingface.co/datasets/open-llm-leaderboard/nlpguy__Mistral-NeMo-Minitron-Upscale-v1-details/blob/main/nlpguy__Mistral-NeMo-Minitron-Upscale-v1/results_2024-09-29T18-46-06.825022.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"inst_level_strict_acc,none\": 0.22062350119904076,\n \"\
inst_level_strict_acc_stderr,none\": \"N/A\",\n \"inst_level_loose_acc,none\"\
: 0.2410071942446043,\n \"inst_level_loose_acc_stderr,none\": \"N/A\"\
,\n \"exact_match,none\": 0.006797583081570997,\n \"exact_match_stderr,none\"\
: 0.0022507420201403976,\n \"acc,none\": 0.2537400265957447,\n \
\ \"acc_stderr,none\": 0.00396724207519909,\n \"acc_norm,none\"\
: 0.4145803606174601,\n \"acc_norm_stderr,none\": 0.005253217772137442,\n\
\ \"prompt_level_strict_acc,none\": 0.10905730129390019,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.013413909746312052,\n \"\
prompt_level_loose_acc,none\": 0.12754158964879853,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.014354940597336765,\n \"alias\": \"leaderboard\"\n },\n \
\ \"leaderboard_bbh\": {\n \"acc_norm,none\": 0.44697101197708733,\n\
\ \"acc_norm_stderr,none\": 0.006081313868196425,\n \"alias\"\
: \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"acc_norm,none\": 0.748,\n \"acc_norm_stderr,none\"\
: 0.02751385193303135,\n \"alias\": \" - leaderboard_bbh_boolean_expressions\"\
\n },\n \"leaderboard_bbh_causal_judgement\": {\n \"acc_norm,none\"\
: 0.5187165775401069,\n \"acc_norm_stderr,none\": 0.03663608375537842,\n\
\ \"alias\": \" - leaderboard_bbh_causal_judgement\"\n },\n \
\ \"leaderboard_bbh_date_understanding\": {\n \"acc_norm,none\"\
: 0.6,\n \"acc_norm_stderr,none\": 0.031046021028253237,\n \
\ \"alias\": \" - leaderboard_bbh_date_understanding\"\n },\n \"\
leaderboard_bbh_disambiguation_qa\": {\n \"acc_norm,none\": 0.648,\n\
\ \"acc_norm_stderr,none\": 0.03026628805735994,\n \"alias\"\
: \" - leaderboard_bbh_disambiguation_qa\"\n },\n \"leaderboard_bbh_formal_fallacies\"\
: {\n \"acc_norm,none\": 0.488,\n \"acc_norm_stderr,none\"\
: 0.03167708558254708,\n \"alias\": \" - leaderboard_bbh_formal_fallacies\"\
\n },\n \"leaderboard_bbh_geometric_shapes\": {\n \"acc_norm,none\"\
: 0.396,\n \"acc_norm_stderr,none\": 0.03099319785457785,\n \
\ \"alias\": \" - leaderboard_bbh_geometric_shapes\"\n },\n \"\
leaderboard_bbh_hyperbaton\": {\n \"acc_norm,none\": 0.536,\n \
\ \"acc_norm_stderr,none\": 0.03160397514522374,\n \"alias\": \"\
\ - leaderboard_bbh_hyperbaton\"\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"acc_norm,none\": 0.32,\n \"acc_norm_stderr,none\"\
: 0.029561724955241033,\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \
\ \"acc_norm,none\": 0.244,\n \"acc_norm_stderr,none\": 0.027217995464553175,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"acc_norm,none\": 0.528,\n \"acc_norm_stderr,none\": 0.031636489531544396,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
acc_norm,none\": 0.848,\n \"acc_norm_stderr,none\": 0.02275202449176547,\n\
\ \"alias\": \" - leaderboard_bbh_movie_recommendation\"\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"acc_norm,none\": 0.42,\n\
\ \"acc_norm_stderr,none\": 0.03127799950463661,\n \"alias\"\
: \" - leaderboard_bbh_navigate\"\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"acc_norm,none\": 0.388,\n \"acc_norm_stderr,none\"\
: 0.030881038748993915,\n \"alias\": \" - leaderboard_bbh_object_counting\"\
\n },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
acc_norm,none\": 0.3219178082191781,\n \"acc_norm_stderr,none\": 0.03879981629627135,\n\
\ \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\n },\n\
\ \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \"\
acc_norm,none\": 0.356,\n \"acc_norm_stderr,none\": 0.03034368065715321,\n\
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"acc_norm,none\"\
: 0.392,\n \"acc_norm_stderr,none\": 0.0309382076204012,\n \
\ \"alias\": \" - leaderboard_bbh_ruin_names\"\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"acc_norm,none\": 0.376,\n \"acc_norm_stderr,none\"\
: 0.030696336267394583,\n \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\
\n },\n \"leaderboard_bbh_snarks\": {\n \"acc_norm,none\"\
: 0.550561797752809,\n \"acc_norm_stderr,none\": 0.037389649660569645,\n\
\ \"alias\": \" - leaderboard_bbh_snarks\"\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"acc_norm,none\": 0.764,\n \"acc_norm_stderr,none\"\
: 0.026909337594953852,\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
\n },\n \"leaderboard_bbh_temporal_sequences\": {\n \"\
acc_norm,none\": 0.168,\n \"acc_norm_stderr,none\": 0.023692813205492585,\n\
\ \"alias\": \" - leaderboard_bbh_temporal_sequences\"\n },\n\
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"acc_norm,none\": 0.14,\n \"acc_norm_stderr,none\": 0.021989409645240262,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\"\
: 0.022752024491765464,\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\"\
: 0.029844039047465902,\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
\n },\n \"leaderboard_bbh_web_of_lies\": {\n \"acc_norm,none\"\
: 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254709,\n \
\ \"alias\": \" - leaderboard_bbh_web_of_lies\"\n },\n \"leaderboard_gpqa\"\
: {\n \"acc_norm,none\": 0.2802013422818792,\n \"acc_norm_stderr,none\"\
: 0.013016817161560583,\n \"alias\": \" - leaderboard_gpqa\"\n \
\ },\n \"leaderboard_gpqa_diamond\": {\n \"acc_norm,none\": 0.29797979797979796,\n\
\ \"acc_norm_stderr,none\": 0.032586303838365555,\n \"alias\"\
: \" - leaderboard_gpqa_diamond\"\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"acc_norm,none\": 0.2893772893772894,\n \"acc_norm_stderr,none\"\
: 0.01942466387226182,\n \"alias\": \" - leaderboard_gpqa_extended\"\
\n },\n \"leaderboard_gpqa_main\": {\n \"acc_norm,none\"\
: 0.2611607142857143,\n \"acc_norm_stderr,none\": 0.020776632223035954,\n\
\ \"alias\": \" - leaderboard_gpqa_main\"\n },\n \"leaderboard_ifeval\"\
: {\n \"prompt_level_strict_acc,none\": 0.10905730129390019,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.013413909746312052,\n \
\ \"inst_level_strict_acc,none\": 0.22062350119904076,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.12754158964879853,\n\
\ \"prompt_level_loose_acc_stderr,none\": 0.014354940597336763,\n \
\ \"inst_level_loose_acc,none\": 0.24100719424460432,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"alias\": \" - leaderboard_ifeval\"\n },\n \
\ \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.006797583081570997,\n\
\ \"exact_match_stderr,none\": 0.0022507420201403976,\n \"\
alias\": \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\"\
: {\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\"\
: 0.0,\n \"alias\": \" - leaderboard_math_algebra_hard\"\n },\n\
\ \"leaderboard_math_counting_and_prob_hard\": {\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0,\n \"alias\": \"\
\ - leaderboard_math_counting_and_prob_hard\"\n },\n \"leaderboard_math_geometry_hard\"\
: {\n \"exact_match,none\": 0.007575757575757576,\n \"exact_match_stderr,none\"\
: 0.007575757575757562,\n \"alias\": \" - leaderboard_math_geometry_hard\"\
\n },\n \"leaderboard_math_intermediate_algebra_hard\": {\n \
\ \"exact_match,none\": 0.0035714285714285713,\n \"exact_match_stderr,none\"\
: 0.0035714285714285657,\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\"\
\n },\n \"leaderboard_math_num_theory_hard\": {\n \"exact_match,none\"\
: 0.012987012987012988,\n \"exact_match_stderr,none\": 0.009153145279150208,\n\
\ \"alias\": \" - leaderboard_math_num_theory_hard\"\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"exact_match,none\"\
: 0.025906735751295335,\n \"exact_match_stderr,none\": 0.0114645233569532,\n\
\ \"alias\": \" - leaderboard_math_prealgebra_hard\"\n },\n \
\ \"leaderboard_math_precalculus_hard\": {\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0,\n \"alias\": \"\
\ - leaderboard_math_precalculus_hard\"\n },\n \"leaderboard_mmlu_pro\"\
: {\n \"acc,none\": 0.2537400265957447,\n \"acc_stderr,none\"\
: 0.00396724207519909,\n \"alias\": \" - leaderboard_mmlu_pro\"\n \
\ },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.37962962962962965,\n\
\ \"acc_norm_stderr,none\": 0.01734042434129184,\n \"alias\"\
: \" - leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\"\
: {\n \"acc_norm,none\": 0.512,\n \"acc_norm_stderr,none\"\
: 0.03167708558254708,\n \"alias\": \" - leaderboard_musr_murder_mysteries\"\
\n },\n \"leaderboard_musr_object_placements\": {\n \"\
acc_norm,none\": 0.2890625,\n \"acc_norm_stderr,none\": 0.02838843806999465,\n\
\ \"alias\": \" - leaderboard_musr_object_placements\"\n },\n\
\ \"leaderboard_musr_team_allocation\": {\n \"acc_norm,none\"\
: 0.34,\n \"acc_norm_stderr,none\": 0.030020073605457907,\n \
\ \"alias\": \" - leaderboard_musr_team_allocation\"\n }\n },\n \
\ \"leaderboard\": {\n \"inst_level_strict_acc,none\": 0.22062350119904076,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"inst_level_loose_acc,none\"\
: 0.2410071942446043,\n \"inst_level_loose_acc_stderr,none\": \"N/A\",\n\
\ \"exact_match,none\": 0.006797583081570997,\n \"exact_match_stderr,none\"\
: 0.0022507420201403976,\n \"acc,none\": 0.2537400265957447,\n \"\
acc_stderr,none\": 0.00396724207519909,\n \"acc_norm,none\": 0.4145803606174601,\n\
\ \"acc_norm_stderr,none\": 0.005253217772137442,\n \"prompt_level_strict_acc,none\"\
: 0.10905730129390019,\n \"prompt_level_strict_acc_stderr,none\": 0.013413909746312052,\n\
\ \"prompt_level_loose_acc,none\": 0.12754158964879853,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.014354940597336765,\n \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.44697101197708733,\n \"acc_norm_stderr,none\"\
: 0.006081313868196425,\n \"alias\": \" - leaderboard_bbh\"\n },\n \
\ \"leaderboard_bbh_boolean_expressions\": {\n \"acc_norm,none\": 0.748,\n\
\ \"acc_norm_stderr,none\": 0.02751385193303135,\n \"alias\": \" \
\ - leaderboard_bbh_boolean_expressions\"\n },\n \"leaderboard_bbh_causal_judgement\"\
: {\n \"acc_norm,none\": 0.5187165775401069,\n \"acc_norm_stderr,none\"\
: 0.03663608375537842,\n \"alias\": \" - leaderboard_bbh_causal_judgement\"\
\n },\n \"leaderboard_bbh_date_understanding\": {\n \"acc_norm,none\"\
: 0.6,\n \"acc_norm_stderr,none\": 0.031046021028253237,\n \"alias\"\
: \" - leaderboard_bbh_date_understanding\"\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"acc_norm,none\": 0.648,\n \"acc_norm_stderr,none\": 0.03026628805735994,\n\
\ \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\n },\n \"leaderboard_bbh_formal_fallacies\"\
: {\n \"acc_norm,none\": 0.488,\n \"acc_norm_stderr,none\": 0.03167708558254708,\n\
\ \"alias\": \" - leaderboard_bbh_formal_fallacies\"\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"acc_norm,none\": 0.396,\n \"acc_norm_stderr,none\": 0.03099319785457785,\n\
\ \"alias\": \" - leaderboard_bbh_geometric_shapes\"\n },\n \"leaderboard_bbh_hyperbaton\"\
: {\n \"acc_norm,none\": 0.536,\n \"acc_norm_stderr,none\": 0.03160397514522374,\n\
\ \"alias\": \" - leaderboard_bbh_hyperbaton\"\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"acc_norm,none\": 0.32,\n \"acc_norm_stderr,none\": 0.029561724955241033,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\n \
\ },\n \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"acc_norm,none\"\
: 0.244,\n \"acc_norm_stderr,none\": 0.027217995464553175,\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_seven_objects\"\n },\n \"leaderboard_bbh_logical_deduction_three_objects\"\
: {\n \"acc_norm,none\": 0.528,\n \"acc_norm_stderr,none\": 0.031636489531544396,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\n \
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"acc_norm,none\"\
: 0.848,\n \"acc_norm_stderr,none\": 0.02275202449176547,\n \"alias\"\
: \" - leaderboard_bbh_movie_recommendation\"\n },\n \"leaderboard_bbh_navigate\"\
: {\n \"acc_norm,none\": 0.42,\n \"acc_norm_stderr,none\": 0.03127799950463661,\n\
\ \"alias\": \" - leaderboard_bbh_navigate\"\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"acc_norm,none\": 0.388,\n \"acc_norm_stderr,none\": 0.030881038748993915,\n\
\ \"alias\": \" - leaderboard_bbh_object_counting\"\n },\n \"leaderboard_bbh_penguins_in_a_table\"\
: {\n \"acc_norm,none\": 0.3219178082191781,\n \"acc_norm_stderr,none\"\
: 0.03879981629627135,\n \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\
\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \"\
acc_norm,none\": 0.356,\n \"acc_norm_stderr,none\": 0.03034368065715321,\n\
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\n \
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"acc_norm,none\": 0.392,\n\
\ \"acc_norm_stderr,none\": 0.0309382076204012,\n \"alias\": \" -\
\ leaderboard_bbh_ruin_names\"\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"acc_norm,none\": 0.376,\n \"acc_norm_stderr,none\": 0.030696336267394583,\n\
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"acc_norm,none\": 0.550561797752809,\n\
\ \"acc_norm_stderr,none\": 0.037389649660569645,\n \"alias\": \"\
\ - leaderboard_bbh_snarks\"\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"acc_norm,none\": 0.764,\n \"acc_norm_stderr,none\": 0.026909337594953852,\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\"\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"acc_norm,none\": 0.168,\n \
\ \"acc_norm_stderr,none\": 0.023692813205492585,\n \"alias\": \" -\
\ leaderboard_bbh_temporal_sequences\"\n },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
: {\n \"acc_norm,none\": 0.14,\n \"acc_norm_stderr,none\": 0.021989409645240262,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"acc_norm,none\": 0.152,\n \"acc_norm_stderr,none\": 0.022752024491765464,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"acc_norm,none\": 0.332,\n \"acc_norm_stderr,none\": 0.029844039047465902,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
\n },\n \"leaderboard_bbh_web_of_lies\": {\n \"acc_norm,none\": 0.488,\n\
\ \"acc_norm_stderr,none\": 0.03167708558254709,\n \"alias\": \" \
\ - leaderboard_bbh_web_of_lies\"\n },\n \"leaderboard_gpqa\": {\n \
\ \"acc_norm,none\": 0.2802013422818792,\n \"acc_norm_stderr,none\": 0.013016817161560583,\n\
\ \"alias\": \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\"\
: {\n \"acc_norm,none\": 0.29797979797979796,\n \"acc_norm_stderr,none\"\
: 0.032586303838365555,\n \"alias\": \" - leaderboard_gpqa_diamond\"\n \
\ },\n \"leaderboard_gpqa_extended\": {\n \"acc_norm,none\": 0.2893772893772894,\n\
\ \"acc_norm_stderr,none\": 0.01942466387226182,\n \"alias\": \" \
\ - leaderboard_gpqa_extended\"\n },\n \"leaderboard_gpqa_main\": {\n \
\ \"acc_norm,none\": 0.2611607142857143,\n \"acc_norm_stderr,none\": 0.020776632223035954,\n\
\ \"alias\": \" - leaderboard_gpqa_main\"\n },\n \"leaderboard_ifeval\"\
: {\n \"prompt_level_strict_acc,none\": 0.10905730129390019,\n \"\
prompt_level_strict_acc_stderr,none\": 0.013413909746312052,\n \"inst_level_strict_acc,none\"\
: 0.22062350119904076,\n \"inst_level_strict_acc_stderr,none\": \"N/A\",\n\
\ \"prompt_level_loose_acc,none\": 0.12754158964879853,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.014354940597336763,\n \"inst_level_loose_acc,none\": 0.24100719424460432,\n\
\ \"inst_level_loose_acc_stderr,none\": \"N/A\",\n \"alias\": \" -\
\ leaderboard_ifeval\"\n },\n \"leaderboard_math_hard\": {\n \"exact_match,none\"\
: 0.006797583081570997,\n \"exact_match_stderr,none\": 0.0022507420201403976,\n\
\ \"alias\": \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\"\
: {\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0,\n\
\ \"alias\": \" - leaderboard_math_algebra_hard\"\n },\n \"leaderboard_math_counting_and_prob_hard\"\
: {\n \"exact_match,none\": 0.0,\n \"exact_match_stderr,none\": 0.0,\n\
\ \"alias\": \" - leaderboard_math_counting_and_prob_hard\"\n },\n \
\ \"leaderboard_math_geometry_hard\": {\n \"exact_match,none\": 0.007575757575757576,\n\
\ \"exact_match_stderr,none\": 0.007575757575757562,\n \"alias\":\
\ \" - leaderboard_math_geometry_hard\"\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"exact_match,none\": 0.0035714285714285713,\n \"exact_match_stderr,none\"\
: 0.0035714285714285657,\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\"\
\n },\n \"leaderboard_math_num_theory_hard\": {\n \"exact_match,none\"\
: 0.012987012987012988,\n \"exact_match_stderr,none\": 0.009153145279150208,\n\
\ \"alias\": \" - leaderboard_math_num_theory_hard\"\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"exact_match,none\": 0.025906735751295335,\n \"exact_match_stderr,none\"\
: 0.0114645233569532,\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
\n },\n \"leaderboard_math_precalculus_hard\": {\n \"exact_match,none\"\
: 0.0,\n \"exact_match_stderr,none\": 0.0,\n \"alias\": \" - leaderboard_math_precalculus_hard\"\
\n },\n \"leaderboard_mmlu_pro\": {\n \"acc,none\": 0.2537400265957447,\n\
\ \"acc_stderr,none\": 0.00396724207519909,\n \"alias\": \" - leaderboard_mmlu_pro\"\
\n },\n \"leaderboard_musr\": {\n \"acc_norm,none\": 0.37962962962962965,\n\
\ \"acc_norm_stderr,none\": 0.01734042434129184,\n \"alias\": \" -\
\ leaderboard_musr\"\n },\n \"leaderboard_musr_murder_mysteries\": {\n \
\ \"acc_norm,none\": 0.512,\n \"acc_norm_stderr,none\": 0.03167708558254708,\n\
\ \"alias\": \" - leaderboard_musr_murder_mysteries\"\n },\n \"leaderboard_musr_object_placements\"\
: {\n \"acc_norm,none\": 0.2890625,\n \"acc_norm_stderr,none\": 0.02838843806999465,\n\
\ \"alias\": \" - leaderboard_musr_object_placements\"\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"acc_norm,none\": 0.34,\n \"acc_norm_stderr,none\": 0.030020073605457907,\n\
\ \"alias\": \" - leaderboard_musr_team_allocation\"\n }\n}\n```"
repo_url: https://huggingface.co/nlpguy/Mistral-NeMo-Minitron-Upscale-v1
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_date_understanding
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_navigate
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_navigate_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_object_counting
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_object_counting_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_ruin_names
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_snarks
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_snarks_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_gpqa_diamond
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_gpqa_diamond_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_gpqa_extended
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_gpqa_extended_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_gpqa_main
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_gpqa_main_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_ifeval
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_ifeval_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_math_algebra_hard
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_math_algebra_hard_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_math_geometry_hard
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_math_geometry_hard_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_math_num_theory_hard
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_math_precalculus_hard
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_mmlu_pro
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_mmlu_pro_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_musr_object_placements
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_musr_object_placements_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-09-29T18-46-06.825022.jsonl'
- config_name: nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_musr_team_allocation
data_files:
- split: 2024_09_29T18_46_06.825022
path:
- '**/samples_leaderboard_musr_team_allocation_2024-09-29T18-46-06.825022.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-09-29T18-46-06.825022.jsonl'
---
# Dataset Card for Evaluation run of nlpguy/Mistral-NeMo-Minitron-Upscale-v1
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [nlpguy/Mistral-NeMo-Minitron-Upscale-v1](https://huggingface.co/nlpguy/Mistral-NeMo-Minitron-Upscale-v1)
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset

# Each per-task configuration exposes a timestamped split plus a "latest"
# split that always points to the newest run.
data = load_dataset(
    "open-llm-leaderboard/nlpguy__Mistral-NeMo-Minitron-Upscale-v1-details",
    name="nlpguy__Mistral-NeMo-Minitron-Upscale-v1__leaderboard_bbh_boolean_expressions",
    split="latest"
)
```
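Since the repository exposes one configuration per evaluated task, it can help to enumerate them before loading. A minimal sketch using `get_dataset_config_names` from the `datasets` library (the function simply queries the Hub for the configurations listed in this card's metadata):
```python
from datasets import get_dataset_config_names

# Enumerate the per-task configurations defined for this details repository
configs = get_dataset_config_names(
    "open-llm-leaderboard/nlpguy__Mistral-NeMo-Minitron-Upscale-v1-details"
)
print(len(configs), "configurations")
print(configs[:3])  # e.g. the first few leaderboard_bbh_* tasks
```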
## Latest results
These are the [latest results from run 2024-09-29T18-46-06.825022](https://huggingface.co/datasets/open-llm-leaderboard/nlpguy__Mistral-NeMo-Minitron-Upscale-v1-details/blob/main/nlpguy__Mistral-NeMo-Minitron-Upscale-v1/results_2024-09-29T18-46-06.825022.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"leaderboard": {
"inst_level_strict_acc,none": 0.22062350119904076,
"inst_level_strict_acc_stderr,none": "N/A",
"inst_level_loose_acc,none": 0.2410071942446043,
"inst_level_loose_acc_stderr,none": "N/A",
"exact_match,none": 0.006797583081570997,
"exact_match_stderr,none": 0.0022507420201403976,
"acc,none": 0.2537400265957447,
"acc_stderr,none": 0.00396724207519909,
"acc_norm,none": 0.4145803606174601,
"acc_norm_stderr,none": 0.005253217772137442,
"prompt_level_strict_acc,none": 0.10905730129390019,
"prompt_level_strict_acc_stderr,none": 0.013413909746312052,
"prompt_level_loose_acc,none": 0.12754158964879853,
"prompt_level_loose_acc_stderr,none": 0.014354940597336765,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.44697101197708733,
"acc_norm_stderr,none": 0.006081313868196425,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"acc_norm,none": 0.748,
"acc_norm_stderr,none": 0.02751385193303135,
"alias": " - leaderboard_bbh_boolean_expressions"
},
"leaderboard_bbh_causal_judgement": {
"acc_norm,none": 0.5187165775401069,
"acc_norm_stderr,none": 0.03663608375537842,
"alias": " - leaderboard_bbh_causal_judgement"
},
"leaderboard_bbh_date_understanding": {
"acc_norm,none": 0.6,
"acc_norm_stderr,none": 0.031046021028253237,
"alias": " - leaderboard_bbh_date_understanding"
},
"leaderboard_bbh_disambiguation_qa": {
"acc_norm,none": 0.648,
"acc_norm_stderr,none": 0.03026628805735994,
"alias": " - leaderboard_bbh_disambiguation_qa"
},
"leaderboard_bbh_formal_fallacies": {
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254708,
"alias": " - leaderboard_bbh_formal_fallacies"
},
"leaderboard_bbh_geometric_shapes": {
"acc_norm,none": 0.396,
"acc_norm_stderr,none": 0.03099319785457785,
"alias": " - leaderboard_bbh_geometric_shapes"
},
"leaderboard_bbh_hyperbaton": {
"acc_norm,none": 0.536,
"acc_norm_stderr,none": 0.03160397514522374,
"alias": " - leaderboard_bbh_hyperbaton"
},
"leaderboard_bbh_logical_deduction_five_objects": {
"acc_norm,none": 0.32,
"acc_norm_stderr,none": 0.029561724955241033,
"alias": " - leaderboard_bbh_logical_deduction_five_objects"
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"acc_norm,none": 0.244,
"acc_norm_stderr,none": 0.027217995464553175,
"alias": " - leaderboard_bbh_logical_deduction_seven_objects"
},
"leaderboard_bbh_logical_deduction_three_objects": {
"acc_norm,none": 0.528,
"acc_norm_stderr,none": 0.031636489531544396,
"alias": " - leaderboard_bbh_logical_deduction_three_objects"
},
"leaderboard_bbh_movie_recommendation": {
"acc_norm,none": 0.848,
"acc_norm_stderr,none": 0.02275202449176547,
"alias": " - leaderboard_bbh_movie_recommendation"
},
"leaderboard_bbh_navigate": {
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661,
"alias": " - leaderboard_bbh_navigate"
},
"leaderboard_bbh_object_counting": {
"acc_norm,none": 0.388,
"acc_norm_stderr,none": 0.030881038748993915,
"alias": " - leaderboard_bbh_object_counting"
},
"leaderboard_bbh_penguins_in_a_table": {
"acc_norm,none": 0.3219178082191781,
"acc_norm_stderr,none": 0.03879981629627135,
"alias": " - leaderboard_bbh_penguins_in_a_table"
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"acc_norm,none": 0.356,
"acc_norm_stderr,none": 0.03034368065715321,
"alias": " - leaderboard_bbh_reasoning_about_colored_objects"
},
"leaderboard_bbh_ruin_names": {
"acc_norm,none": 0.392,
"acc_norm_stderr,none": 0.0309382076204012,
"alias": " - leaderboard_bbh_ruin_names"
},
"leaderboard_bbh_salient_translation_error_detection": {
"acc_norm,none": 0.376,
"acc_norm_stderr,none": 0.030696336267394583,
"alias": " - leaderboard_bbh_salient_translation_error_detection"
},
"leaderboard_bbh_snarks": {
"acc_norm,none": 0.550561797752809,
"acc_norm_stderr,none": 0.037389649660569645,
"alias": " - leaderboard_bbh_snarks"
},
"leaderboard_bbh_sports_understanding": {
"acc_norm,none": 0.764,
"acc_norm_stderr,none": 0.026909337594953852,
"alias": " - leaderboard_bbh_sports_understanding"
},
"leaderboard_bbh_temporal_sequences": {
"acc_norm,none": 0.168,
"acc_norm_stderr,none": 0.023692813205492585,
"alias": " - leaderboard_bbh_temporal_sequences"
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240262,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465902,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects"
},
"leaderboard_bbh_web_of_lies": {
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254709,
"alias": " - leaderboard_bbh_web_of_lies"
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2802013422818792,
"acc_norm_stderr,none": 0.013016817161560583,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"acc_norm,none": 0.29797979797979796,
"acc_norm_stderr,none": 0.032586303838365555,
"alias": " - leaderboard_gpqa_diamond"
},
"leaderboard_gpqa_extended": {
"acc_norm,none": 0.2893772893772894,
"acc_norm_stderr,none": 0.01942466387226182,
"alias": " - leaderboard_gpqa_extended"
},
"leaderboard_gpqa_main": {
"acc_norm,none": 0.2611607142857143,
"acc_norm_stderr,none": 0.020776632223035954,
"alias": " - leaderboard_gpqa_main"
},
"leaderboard_ifeval": {
"prompt_level_strict_acc,none": 0.10905730129390019,
"prompt_level_strict_acc_stderr,none": 0.013413909746312052,
"inst_level_strict_acc,none": 0.22062350119904076,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.12754158964879853,
"prompt_level_loose_acc_stderr,none": 0.014354940597336763,
"inst_level_loose_acc,none": 0.24100719424460432,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": " - leaderboard_ifeval"
},
"leaderboard_math_hard": {
"exact_match,none": 0.006797583081570997,
"exact_match_stderr,none": 0.0022507420201403976,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_algebra_hard"
},
"leaderboard_math_counting_and_prob_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_counting_and_prob_hard"
},
"leaderboard_math_geometry_hard": {
"exact_match,none": 0.007575757575757576,
"exact_match_stderr,none": 0.007575757575757562,
"alias": " - leaderboard_math_geometry_hard"
},
"leaderboard_math_intermediate_algebra_hard": {
"exact_match,none": 0.0035714285714285713,
"exact_match_stderr,none": 0.0035714285714285657,
"alias": " - leaderboard_math_intermediate_algebra_hard"
},
"leaderboard_math_num_theory_hard": {
"exact_match,none": 0.012987012987012988,
"exact_match_stderr,none": 0.009153145279150208,
"alias": " - leaderboard_math_num_theory_hard"
},
"leaderboard_math_prealgebra_hard": {
"exact_match,none": 0.025906735751295335,
"exact_match_stderr,none": 0.0114645233569532,
"alias": " - leaderboard_math_prealgebra_hard"
},
"leaderboard_math_precalculus_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_precalculus_hard"
},
"leaderboard_mmlu_pro": {
"acc,none": 0.2537400265957447,
"acc_stderr,none": 0.00396724207519909,
"alias": " - leaderboard_mmlu_pro"
},
"leaderboard_musr": {
"acc_norm,none": 0.37962962962962965,
"acc_norm_stderr,none": 0.01734042434129184,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"acc_norm,none": 0.512,
"acc_norm_stderr,none": 0.03167708558254708,
"alias": " - leaderboard_musr_murder_mysteries"
},
"leaderboard_musr_object_placements": {
"acc_norm,none": 0.2890625,
"acc_norm_stderr,none": 0.02838843806999465,
"alias": " - leaderboard_musr_object_placements"
},
"leaderboard_musr_team_allocation": {
"acc_norm,none": 0.34,
"acc_norm_stderr,none": 0.030020073605457907,
"alias": " - leaderboard_musr_team_allocation"
}
},
"leaderboard": {
"inst_level_strict_acc,none": 0.22062350119904076,
"inst_level_strict_acc_stderr,none": "N/A",
"inst_level_loose_acc,none": 0.2410071942446043,
"inst_level_loose_acc_stderr,none": "N/A",
"exact_match,none": 0.006797583081570997,
"exact_match_stderr,none": 0.0022507420201403976,
"acc,none": 0.2537400265957447,
"acc_stderr,none": 0.00396724207519909,
"acc_norm,none": 0.4145803606174601,
"acc_norm_stderr,none": 0.005253217772137442,
"prompt_level_strict_acc,none": 0.10905730129390019,
"prompt_level_strict_acc_stderr,none": 0.013413909746312052,
"prompt_level_loose_acc,none": 0.12754158964879853,
"prompt_level_loose_acc_stderr,none": 0.014354940597336765,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.44697101197708733,
"acc_norm_stderr,none": 0.006081313868196425,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"acc_norm,none": 0.748,
"acc_norm_stderr,none": 0.02751385193303135,
"alias": " - leaderboard_bbh_boolean_expressions"
},
"leaderboard_bbh_causal_judgement": {
"acc_norm,none": 0.5187165775401069,
"acc_norm_stderr,none": 0.03663608375537842,
"alias": " - leaderboard_bbh_causal_judgement"
},
"leaderboard_bbh_date_understanding": {
"acc_norm,none": 0.6,
"acc_norm_stderr,none": 0.031046021028253237,
"alias": " - leaderboard_bbh_date_understanding"
},
"leaderboard_bbh_disambiguation_qa": {
"acc_norm,none": 0.648,
"acc_norm_stderr,none": 0.03026628805735994,
"alias": " - leaderboard_bbh_disambiguation_qa"
},
"leaderboard_bbh_formal_fallacies": {
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254708,
"alias": " - leaderboard_bbh_formal_fallacies"
},
"leaderboard_bbh_geometric_shapes": {
"acc_norm,none": 0.396,
"acc_norm_stderr,none": 0.03099319785457785,
"alias": " - leaderboard_bbh_geometric_shapes"
},
"leaderboard_bbh_hyperbaton": {
"acc_norm,none": 0.536,
"acc_norm_stderr,none": 0.03160397514522374,
"alias": " - leaderboard_bbh_hyperbaton"
},
"leaderboard_bbh_logical_deduction_five_objects": {
"acc_norm,none": 0.32,
"acc_norm_stderr,none": 0.029561724955241033,
"alias": " - leaderboard_bbh_logical_deduction_five_objects"
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"acc_norm,none": 0.244,
"acc_norm_stderr,none": 0.027217995464553175,
"alias": " - leaderboard_bbh_logical_deduction_seven_objects"
},
"leaderboard_bbh_logical_deduction_three_objects": {
"acc_norm,none": 0.528,
"acc_norm_stderr,none": 0.031636489531544396,
"alias": " - leaderboard_bbh_logical_deduction_three_objects"
},
"leaderboard_bbh_movie_recommendation": {
"acc_norm,none": 0.848,
"acc_norm_stderr,none": 0.02275202449176547,
"alias": " - leaderboard_bbh_movie_recommendation"
},
"leaderboard_bbh_navigate": {
"acc_norm,none": 0.42,
"acc_norm_stderr,none": 0.03127799950463661,
"alias": " - leaderboard_bbh_navigate"
},
"leaderboard_bbh_object_counting": {
"acc_norm,none": 0.388,
"acc_norm_stderr,none": 0.030881038748993915,
"alias": " - leaderboard_bbh_object_counting"
},
"leaderboard_bbh_penguins_in_a_table": {
"acc_norm,none": 0.3219178082191781,
"acc_norm_stderr,none": 0.03879981629627135,
"alias": " - leaderboard_bbh_penguins_in_a_table"
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"acc_norm,none": 0.356,
"acc_norm_stderr,none": 0.03034368065715321,
"alias": " - leaderboard_bbh_reasoning_about_colored_objects"
},
"leaderboard_bbh_ruin_names": {
"acc_norm,none": 0.392,
"acc_norm_stderr,none": 0.0309382076204012,
"alias": " - leaderboard_bbh_ruin_names"
},
"leaderboard_bbh_salient_translation_error_detection": {
"acc_norm,none": 0.376,
"acc_norm_stderr,none": 0.030696336267394583,
"alias": " - leaderboard_bbh_salient_translation_error_detection"
},
"leaderboard_bbh_snarks": {
"acc_norm,none": 0.550561797752809,
"acc_norm_stderr,none": 0.037389649660569645,
"alias": " - leaderboard_bbh_snarks"
},
"leaderboard_bbh_sports_understanding": {
"acc_norm,none": 0.764,
"acc_norm_stderr,none": 0.026909337594953852,
"alias": " - leaderboard_bbh_sports_understanding"
},
"leaderboard_bbh_temporal_sequences": {
"acc_norm,none": 0.168,
"acc_norm_stderr,none": 0.023692813205492585,
"alias": " - leaderboard_bbh_temporal_sequences"
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"acc_norm,none": 0.14,
"acc_norm_stderr,none": 0.021989409645240262,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"acc_norm,none": 0.152,
"acc_norm_stderr,none": 0.022752024491765464,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"acc_norm,none": 0.332,
"acc_norm_stderr,none": 0.029844039047465902,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects"
},
"leaderboard_bbh_web_of_lies": {
"acc_norm,none": 0.488,
"acc_norm_stderr,none": 0.03167708558254709,
"alias": " - leaderboard_bbh_web_of_lies"
},
"leaderboard_gpqa": {
"acc_norm,none": 0.2802013422818792,
"acc_norm_stderr,none": 0.013016817161560583,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"acc_norm,none": 0.29797979797979796,
"acc_norm_stderr,none": 0.032586303838365555,
"alias": " - leaderboard_gpqa_diamond"
},
"leaderboard_gpqa_extended": {
"acc_norm,none": 0.2893772893772894,
"acc_norm_stderr,none": 0.01942466387226182,
"alias": " - leaderboard_gpqa_extended"
},
"leaderboard_gpqa_main": {
"acc_norm,none": 0.2611607142857143,
"acc_norm_stderr,none": 0.020776632223035954,
"alias": " - leaderboard_gpqa_main"
},
"leaderboard_ifeval": {
"prompt_level_strict_acc,none": 0.10905730129390019,
"prompt_level_strict_acc_stderr,none": 0.013413909746312052,
"inst_level_strict_acc,none": 0.22062350119904076,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.12754158964879853,
"prompt_level_loose_acc_stderr,none": 0.014354940597336763,
"inst_level_loose_acc,none": 0.24100719424460432,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": " - leaderboard_ifeval"
},
"leaderboard_math_hard": {
"exact_match,none": 0.006797583081570997,
"exact_match_stderr,none": 0.0022507420201403976,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_algebra_hard"
},
"leaderboard_math_counting_and_prob_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_counting_and_prob_hard"
},
"leaderboard_math_geometry_hard": {
"exact_match,none": 0.007575757575757576,
"exact_match_stderr,none": 0.007575757575757562,
"alias": " - leaderboard_math_geometry_hard"
},
"leaderboard_math_intermediate_algebra_hard": {
"exact_match,none": 0.0035714285714285713,
"exact_match_stderr,none": 0.0035714285714285657,
"alias": " - leaderboard_math_intermediate_algebra_hard"
},
"leaderboard_math_num_theory_hard": {
"exact_match,none": 0.012987012987012988,
"exact_match_stderr,none": 0.009153145279150208,
"alias": " - leaderboard_math_num_theory_hard"
},
"leaderboard_math_prealgebra_hard": {
"exact_match,none": 0.025906735751295335,
"exact_match_stderr,none": 0.0114645233569532,
"alias": " - leaderboard_math_prealgebra_hard"
},
"leaderboard_math_precalculus_hard": {
"exact_match,none": 0.0,
"exact_match_stderr,none": 0.0,
"alias": " - leaderboard_math_precalculus_hard"
},
"leaderboard_mmlu_pro": {
"acc,none": 0.2537400265957447,
"acc_stderr,none": 0.00396724207519909,
"alias": " - leaderboard_mmlu_pro"
},
"leaderboard_musr": {
"acc_norm,none": 0.37962962962962965,
"acc_norm_stderr,none": 0.01734042434129184,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"acc_norm,none": 0.512,
"acc_norm_stderr,none": 0.03167708558254708,
"alias": " - leaderboard_musr_murder_mysteries"
},
"leaderboard_musr_object_placements": {
"acc_norm,none": 0.2890625,
"acc_norm_stderr,none": 0.02838843806999465,
"alias": " - leaderboard_musr_object_placements"
},
"leaderboard_musr_team_allocation": {
"acc_norm,none": 0.34,
"acc_norm_stderr,none": 0.030020073605457907,
"alias": " - leaderboard_musr_team_allocation"
}
}
```
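The aggregated results file linked above can also be fetched directly from the repository. A minimal sketch using `huggingface_hub` — the file path is taken from the link in "Latest results"; the JSON layout is not assumed beyond what the card shows, so the example only inspects the top-level keys:
```python
import json

from huggingface_hub import hf_hub_download

# Download the aggregated results JSON referenced in "Latest results" above
path = hf_hub_download(
    repo_id="open-llm-leaderboard/nlpguy__Mistral-NeMo-Minitron-Upscale-v1-details",
    filename="nlpguy__Mistral-NeMo-Minitron-Upscale-v1/results_2024-09-29T18-46-06.825022.json",
    repo_type="dataset",
)
with open(path) as f:
    results = json.load(f)

# Inspect the top-level keys rather than assuming a fixed schema
print(list(results.keys()))
```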
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
catsOfpeople/cats | catsOfpeople | "2024-09-29T19:08:55Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T19:04:34Z" | ---
dataset_info:
features:
- name: audio_path
dtype: string
- name: transcript
dtype: string
splits:
- name: train
num_bytes: 1512979
num_examples: 22056
download_size: 251342
dataset_size: 1512979
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
SHASWATSINGH3101/test | SHASWATSINGH3101 | "2024-09-29T19:31:25Z" | 0 | 0 | [
"license:cc-by-nd-4.0",
"size_categories:n<1K",
"format:json",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T19:12:10Z" | ---
license: cc-by-nd-4.0
---
|
Artificial-AI/DATANETFREEVPN | Artificial-AI | "2024-09-29T19:12:39Z" | 0 | 0 | [
"license:apache-2.0",
"region:us"
] | null | "2024-09-29T19:12:39Z" | ---
license: apache-2.0
---
|
ZixuanKe/trading_unsup | ZixuanKe | "2024-09-30T00:36:53Z" | 0 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T19:20:49Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: topic
dtype: string
- name: title
dtype: string
- name: llama3_input_ids
sequence: int64
- name: llama3_attention_mask
sequence: int64
- name: llama3_special_tokens_mask
sequence: int64
- name: subset
dtype: int64
splits:
- name: train
num_bytes: 559586682
num_examples: 2588
download_size: 77960162
dataset_size: 559586682
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
asunsada/Football51 | asunsada | "2024-09-29T19:26:14Z" | 0 | 0 | [
"license:apache-2.0",
"size_categories:n<1K",
"format:csv",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T19:25:52Z" | ---
license: apache-2.0
---
|
AI-Uni-Stuttgart/Reddit-SGM | AI-Uni-Stuttgart | "2024-09-29T19:27:08Z" | 0 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T19:27:06Z" | ---
dataset_info:
features:
- name: index
dtype: int64
- name: id
dtype: string
- name: subreddit
dtype: string
- name: year
dtype: int64
- name: body_cleaned
dtype: string
- name: annotation1
dtype: string
- name: annotation2
dtype: string
- name: annotation3
dtype: string
- name: vote_segments
dtype: string
- name: vote_counts
dtype: string
- name: segment
dtype: string
- name: count
dtype: int64
- name: disagreements
dtype: string
- name: reason_disagreement
dtype: string
- name: type_socialgroup
dtype: string
- name: segment_belongs_to
dtype: string
splits:
- name: train
num_bytes: 14186239
num_examples: 3685
download_size: 1436394
dataset_size: 14186239
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
netranga/2024_ai_report | netranga | "2024-09-29T19:44:28Z" | 0 | 0 | [
"size_categories:n<1K",
"format:imagefolder",
"modality:image",
"library:datasets",
"library:mlcroissant",
"region:us",
"created-with-pdfs-to-page-images-converter",
"pdf-to-image"
] | null | "2024-09-29T19:43:22Z" | ---
size_categories:
- n<1K
tags:
- created-with-pdfs-to-page-images-converter
- pdf-to-image
---
# Dataset Card for netranga/2024_ai_report
## Dataset Description
This dataset contains images converted from PDFs using the PDFs to Page Images Converter Space.
- **Number of images:** 502
- **Number of PDFs processed:** 1
- **Sample size per PDF:** 100
- **Created on:** 2024-09-29 21:44:28
## Dataset Creation
### Source Data
The images in this dataset were generated from user-uploaded PDF files.
### Processing Steps
1. PDF files were uploaded to the PDFs to Page Images Converter.
2. Each PDF was processed, converting selected pages to images.
3. The resulting images were saved and uploaded to this dataset.
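The converter code itself is not included in this card; below is a minimal sketch of how such a PDF-to-page-image conversion might look, assuming the `pdf2image` library (which wraps poppler). The file names and paths are hypothetical, not taken from the actual Space.

```python
# Sketch only: one plausible way to render PDF pages to JPEGs,
# assuming pdf2image is installed (pip install pdf2image) and
# poppler is available on the system. Paths are hypothetical.
from pathlib import Path

from pdf2image import convert_from_path

pdf_path = "report.pdf"      # hypothetical input PDF
out_dir = Path("images")     # matches the dataset's images/ folder layout
out_dir.mkdir(exist_ok=True)

# Render each page to a PIL image at 200 DPI, then save it as JPEG.
pages = convert_from_path(pdf_path, dpi=200)
for i, page in enumerate(pages, start=1):
    page.save(out_dir / f"page_{i:04d}.jpg", "JPEG")
```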
## Dataset Structure
The dataset consists of JPEG images, each representing a single page from the source PDFs.
### Data Fields
- `images/`: A folder containing all the converted images.
### Data Splits
This dataset does not have specific splits.
## Additional Information
- **Contributions:** Thanks to the PDFs to Page Images Converter for creating this dataset.
|
open-llm-leaderboard/MaziyarPanahi__calme-2.5-qwen2-7b-details | open-llm-leaderboard | "2024-09-29T19:57:28Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T19:54:07Z" | ---
pretty_name: Evaluation run of MaziyarPanahi/calme-2.5-qwen2-7b
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [MaziyarPanahi/calme-2.5-qwen2-7b](https://huggingface.co/MaziyarPanahi/calme-2.5-qwen2-7b)\n\
The dataset is composed of 38 configuration(s), each one corresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\
\ be found as a specific split in each configuration, the split being named using\
\ the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\
```python\nfrom datasets import load_dataset\ndata = load_dataset(\n\t\"open-llm-leaderboard/MaziyarPanahi__calme-2.5-qwen2-7b-details\"\
,\n\tname=\"MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_boolean_expressions\"\
,\n\tsplit=\"latest\"\n)\n```\n\n## Latest results\n\nThese are the [latest results\
\ from run 2024-09-29T19-54-06.744164](https://huggingface.co/datasets/open-llm-leaderboard/MaziyarPanahi__calme-2.5-qwen2-7b-details/blob/main/MaziyarPanahi__calme-2.5-qwen2-7b/results_2024-09-29T19-54-06.744164.json)\
\ (note that there might be results for other tasks in the repos if successive evals\
\ didn't cover the same tasks. You find each in the results and the \"latest\" split\
\ for each eval):\n\n```python\n{\n \"all\": {\n \"leaderboard\": {\n\
\ \"acc,none\": 0.3681848404255319,\n \"acc_stderr,none\"\
: 0.00439720900920118,\n \"exact_match,none\": 0.20694864048338368,\n\
\ \"exact_match_stderr,none\": 0.010320240264084115,\n \"\
prompt_level_loose_acc,none\": 0.2902033271719039,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.01953085669122253,\n \"inst_level_strict_acc,none\": 0.381294964028777,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"acc_norm,none\"\
: 0.4563497211052017,\n \"acc_norm_stderr,none\": 0.00532499454536872,\n\
\ \"inst_level_loose_acc,none\": 0.42685851318944845,\n \"\
inst_level_loose_acc_stderr,none\": \"N/A\",\n \"prompt_level_strict_acc,none\"\
: 0.2476894639556377,\n \"prompt_level_strict_acc_stderr,none\": 0.018576139285185186,\n\
\ \"alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\"\
: {\n \"acc_norm,none\": 0.4865474743968061,\n \"acc_norm_stderr,none\"\
: 0.006127359256794648,\n \"alias\": \" - leaderboard_bbh\"\n \
\ },\n \"leaderboard_bbh_boolean_expressions\": {\n \"acc_norm,none\"\
: 0.82,\n \"acc_norm_stderr,none\": 0.024346890650293523,\n \
\ \"alias\": \" - leaderboard_bbh_boolean_expressions\"\n },\n \
\ \"leaderboard_bbh_causal_judgement\": {\n \"acc_norm,none\": 0.6042780748663101,\n\
\ \"acc_norm_stderr,none\": 0.03585560071592546,\n \"alias\"\
: \" - leaderboard_bbh_causal_judgement\"\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"acc_norm,none\": 0.372,\n \"acc_norm_stderr,none\"\
: 0.03063032594455831,\n \"alias\": \" - leaderboard_bbh_date_understanding\"\
\n },\n \"leaderboard_bbh_disambiguation_qa\": {\n \"acc_norm,none\"\
: 0.72,\n \"acc_norm_stderr,none\": 0.02845414827783232,\n \
\ \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\n },\n \"\
leaderboard_bbh_formal_fallacies\": {\n \"acc_norm,none\": 0.532,\n \
\ \"acc_norm_stderr,none\": 0.031621252575725504,\n \"alias\"\
: \" - leaderboard_bbh_formal_fallacies\"\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"acc_norm,none\": 0.304,\n \"acc_norm_stderr,none\"\
: 0.029150213374159673,\n \"alias\": \" - leaderboard_bbh_geometric_shapes\"\
\n },\n \"leaderboard_bbh_hyperbaton\": {\n \"acc_norm,none\"\
: 0.6,\n \"acc_norm_stderr,none\": 0.031046021028253244,\n \
\ \"alias\": \" - leaderboard_bbh_hyperbaton\"\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"acc_norm,none\": 0.46,\n \"acc_norm_stderr,none\"\
: 0.03158465389149899,\n \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\
\n },\n \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \
\ \"acc_norm,none\": 0.468,\n \"acc_norm_stderr,none\": 0.031621252575725504,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_seven_objects\"\n\
\ },\n \"leaderboard_bbh_logical_deduction_three_objects\": {\n \
\ \"acc_norm,none\": 0.652,\n \"acc_norm_stderr,none\": 0.030186568464511683,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\n\
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"\
acc_norm,none\": 0.648,\n \"acc_norm_stderr,none\": 0.03026628805735993,\n\
\ \"alias\": \" - leaderboard_bbh_movie_recommendation\"\n },\n\
\ \"leaderboard_bbh_navigate\": {\n \"acc_norm,none\": 0.584,\n\
\ \"acc_norm_stderr,none\": 0.031235856237014553,\n \"alias\"\
: \" - leaderboard_bbh_navigate\"\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"acc_norm,none\": 0.36,\n \"acc_norm_stderr,none\"\
: 0.030418764025174985,\n \"alias\": \" - leaderboard_bbh_object_counting\"\
\n },\n \"leaderboard_bbh_penguins_in_a_table\": {\n \"\
acc_norm,none\": 0.5547945205479452,\n \"acc_norm_stderr,none\": 0.04127264774457449,\n\
\ \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\n },\n\
\ \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \"\
acc_norm,none\": 0.48,\n \"acc_norm_stderr,none\": 0.031660853408495185,\n\
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\n\
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"acc_norm,none\"\
: 0.564,\n \"acc_norm_stderr,none\": 0.03142556706028129,\n \
\ \"alias\": \" - leaderboard_bbh_ruin_names\"\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"acc_norm,none\": 0.4,\n \"acc_norm_stderr,none\":\
\ 0.031046021028253244,\n \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\
\n },\n \"leaderboard_bbh_snarks\": {\n \"acc_norm,none\"\
: 0.4606741573033708,\n \"acc_norm_stderr,none\": 0.0374658773638787,\n\
\ \"alias\": \" - leaderboard_bbh_snarks\"\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"acc_norm,none\": 0.808,\n \"acc_norm_stderr,none\"\
: 0.024960691989171998,\n \"alias\": \" - leaderboard_bbh_sports_understanding\"\
\n },\n \"leaderboard_bbh_temporal_sequences\": {\n \"\
acc_norm,none\": 0.24,\n \"acc_norm_stderr,none\": 0.027065293652239003,\n\
\ \"alias\": \" - leaderboard_bbh_temporal_sequences\"\n },\n\
\ \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"acc_norm,none\": 0.136,\n \"acc_norm_stderr,none\": 0.02172334261705208,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
: {\n \"acc_norm,none\": 0.132,\n \"acc_norm_stderr,none\"\
: 0.021450980824038096,\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
: {\n \"acc_norm,none\": 0.28,\n \"acc_norm_stderr,none\"\
: 0.028454148277832318,\n \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
\n },\n \"leaderboard_bbh_web_of_lies\": {\n \"acc_norm,none\"\
: 0.548,\n \"acc_norm_stderr,none\": 0.03153986449255662,\n \
\ \"alias\": \" - leaderboard_bbh_web_of_lies\"\n },\n \"leaderboard_gpqa\"\
: {\n \"acc_norm,none\": 0.3104026845637584,\n \"acc_norm_stderr,none\"\
: 0.013409389307482606,\n \"alias\": \" - leaderboard_gpqa\"\n \
\ },\n \"leaderboard_gpqa_diamond\": {\n \"acc_norm,none\": 0.3383838383838384,\n\
\ \"acc_norm_stderr,none\": 0.03371124142626303,\n \"alias\"\
: \" - leaderboard_gpqa_diamond\"\n },\n \"leaderboard_gpqa_extended\"\
: {\n \"acc_norm,none\": 0.2948717948717949,\n \"acc_norm_stderr,none\"\
: 0.01953225605335248,\n \"alias\": \" - leaderboard_gpqa_extended\"\
\n },\n \"leaderboard_gpqa_main\": {\n \"acc_norm,none\"\
: 0.3169642857142857,\n \"acc_norm_stderr,none\": 0.022007621584824805,\n\
\ \"alias\": \" - leaderboard_gpqa_main\"\n },\n \"leaderboard_ifeval\"\
: {\n \"prompt_level_strict_acc,none\": 0.2476894639556377,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.018576139285185186,\n \
\ \"inst_level_strict_acc,none\": 0.381294964028777,\n \"inst_level_strict_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_loose_acc,none\": 0.2902033271719039,\n \
\ \"prompt_level_loose_acc_stderr,none\": 0.01953085669122253,\n \
\ \"inst_level_loose_acc,none\": 0.42685851318944845,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"alias\": \" - leaderboard_ifeval\"\n },\n \
\ \"leaderboard_math_hard\": {\n \"exact_match,none\": 0.20694864048338368,\n\
\ \"exact_match_stderr,none\": 0.010320240264084115,\n \"\
alias\": \" - leaderboard_math_hard\"\n },\n \"leaderboard_math_algebra_hard\"\
: {\n \"exact_match,none\": 0.4201954397394137,\n \"exact_match_stderr,none\"\
: 0.028216670555899662,\n \"alias\": \" - leaderboard_math_algebra_hard\"\
\n },\n \"leaderboard_math_counting_and_prob_hard\": {\n \
\ \"exact_match,none\": 0.13821138211382114,\n \"exact_match_stderr,none\"\
: 0.03124585165489032,\n \"alias\": \" - leaderboard_math_counting_and_prob_hard\"\
\n },\n \"leaderboard_math_geometry_hard\": {\n \"exact_match,none\"\
: 0.09090909090909091,\n \"exact_match_stderr,none\": 0.02511722563616079,\n\
\ \"alias\": \" - leaderboard_math_geometry_hard\"\n },\n \
\ \"leaderboard_math_intermediate_algebra_hard\": {\n \"exact_match,none\"\
: 0.04285714285714286,\n \"exact_match_stderr,none\": 0.012125450612513616,\n\
\ \"alias\": \" - leaderboard_math_intermediate_algebra_hard\"\n \
\ },\n \"leaderboard_math_num_theory_hard\": {\n \"exact_match,none\"\
: 0.18181818181818182,\n \"exact_match_stderr,none\": 0.03118156093500157,\n\
\ \"alias\": \" - leaderboard_math_num_theory_hard\"\n },\n \
\ \"leaderboard_math_prealgebra_hard\": {\n \"exact_match,none\"\
: 0.35751295336787564,\n \"exact_match_stderr,none\": 0.03458816042181005,\n\
\ \"alias\": \" - leaderboard_math_prealgebra_hard\"\n },\n \
\ \"leaderboard_math_precalculus_hard\": {\n \"exact_match,none\"\
: 0.05185185185185185,\n \"exact_match_stderr,none\": 0.019154368449050517,\n\
\ \"alias\": \" - leaderboard_math_precalculus_hard\"\n },\n\
\ \"leaderboard_mmlu_pro\": {\n \"acc,none\": 0.3681848404255319,\n\
\ \"acc_stderr,none\": 0.00439720900920118,\n \"alias\": \"\
\ - leaderboard_mmlu_pro\"\n },\n \"leaderboard_musr\": {\n \
\ \"acc_norm,none\": 0.45634920634920634,\n \"acc_norm_stderr,none\"\
: 0.017922847958440576,\n \"alias\": \" - leaderboard_musr\"\n \
\ },\n \"leaderboard_musr_murder_mysteries\": {\n \"acc_norm,none\"\
: 0.56,\n \"acc_norm_stderr,none\": 0.03145724452223564,\n \
\ \"alias\": \" - leaderboard_musr_murder_mysteries\"\n },\n \"\
leaderboard_musr_object_placements\": {\n \"acc_norm,none\": 0.44140625,\n\
\ \"acc_norm_stderr,none\": 0.031095474260005376,\n \"alias\"\
: \" - leaderboard_musr_object_placements\"\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"acc_norm,none\": 0.368,\n \"acc_norm_stderr,none\"\
: 0.030562070620993167,\n \"alias\": \" - leaderboard_musr_team_allocation\"\
\n }\n },\n \"leaderboard\": {\n \"acc,none\": 0.3681848404255319,\n\
\ \"acc_stderr,none\": 0.00439720900920118,\n \"exact_match,none\"\
: 0.20694864048338368,\n \"exact_match_stderr,none\": 0.010320240264084115,\n\
\ \"prompt_level_loose_acc,none\": 0.2902033271719039,\n \"prompt_level_loose_acc_stderr,none\"\
: 0.01953085669122253,\n \"inst_level_strict_acc,none\": 0.381294964028777,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"acc_norm,none\"\
: 0.4563497211052017,\n \"acc_norm_stderr,none\": 0.00532499454536872,\n\
\ \"inst_level_loose_acc,none\": 0.42685851318944845,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"prompt_level_strict_acc,none\": 0.2476894639556377,\n \
\ \"prompt_level_strict_acc_stderr,none\": 0.018576139285185186,\n \"\
alias\": \"leaderboard\"\n },\n \"leaderboard_bbh\": {\n \"acc_norm,none\"\
: 0.4865474743968061,\n \"acc_norm_stderr,none\": 0.006127359256794648,\n\
\ \"alias\": \" - leaderboard_bbh\"\n },\n \"leaderboard_bbh_boolean_expressions\"\
: {\n \"acc_norm,none\": 0.82,\n \"acc_norm_stderr,none\": 0.024346890650293523,\n\
\ \"alias\": \" - leaderboard_bbh_boolean_expressions\"\n },\n \"\
leaderboard_bbh_causal_judgement\": {\n \"acc_norm,none\": 0.6042780748663101,\n\
\ \"acc_norm_stderr,none\": 0.03585560071592546,\n \"alias\": \" \
\ - leaderboard_bbh_causal_judgement\"\n },\n \"leaderboard_bbh_date_understanding\"\
: {\n \"acc_norm,none\": 0.372,\n \"acc_norm_stderr,none\": 0.03063032594455831,\n\
\ \"alias\": \" - leaderboard_bbh_date_understanding\"\n },\n \"leaderboard_bbh_disambiguation_qa\"\
: {\n \"acc_norm,none\": 0.72,\n \"acc_norm_stderr,none\": 0.02845414827783232,\n\
\ \"alias\": \" - leaderboard_bbh_disambiguation_qa\"\n },\n \"leaderboard_bbh_formal_fallacies\"\
: {\n \"acc_norm,none\": 0.532,\n \"acc_norm_stderr,none\": 0.031621252575725504,\n\
\ \"alias\": \" - leaderboard_bbh_formal_fallacies\"\n },\n \"leaderboard_bbh_geometric_shapes\"\
: {\n \"acc_norm,none\": 0.304,\n \"acc_norm_stderr,none\": 0.029150213374159673,\n\
\ \"alias\": \" - leaderboard_bbh_geometric_shapes\"\n },\n \"leaderboard_bbh_hyperbaton\"\
: {\n \"acc_norm,none\": 0.6,\n \"acc_norm_stderr,none\": 0.031046021028253244,\n\
\ \"alias\": \" - leaderboard_bbh_hyperbaton\"\n },\n \"leaderboard_bbh_logical_deduction_five_objects\"\
: {\n \"acc_norm,none\": 0.46,\n \"acc_norm_stderr,none\": 0.03158465389149899,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_five_objects\"\n \
\ },\n \"leaderboard_bbh_logical_deduction_seven_objects\": {\n \"acc_norm,none\"\
: 0.468,\n \"acc_norm_stderr,none\": 0.031621252575725504,\n \"alias\"\
: \" - leaderboard_bbh_logical_deduction_seven_objects\"\n },\n \"leaderboard_bbh_logical_deduction_three_objects\"\
: {\n \"acc_norm,none\": 0.652,\n \"acc_norm_stderr,none\": 0.030186568464511683,\n\
\ \"alias\": \" - leaderboard_bbh_logical_deduction_three_objects\"\n \
\ },\n \"leaderboard_bbh_movie_recommendation\": {\n \"acc_norm,none\"\
: 0.648,\n \"acc_norm_stderr,none\": 0.03026628805735993,\n \"alias\"\
: \" - leaderboard_bbh_movie_recommendation\"\n },\n \"leaderboard_bbh_navigate\"\
: {\n \"acc_norm,none\": 0.584,\n \"acc_norm_stderr,none\": 0.031235856237014553,\n\
\ \"alias\": \" - leaderboard_bbh_navigate\"\n },\n \"leaderboard_bbh_object_counting\"\
: {\n \"acc_norm,none\": 0.36,\n \"acc_norm_stderr,none\": 0.030418764025174985,\n\
\ \"alias\": \" - leaderboard_bbh_object_counting\"\n },\n \"leaderboard_bbh_penguins_in_a_table\"\
: {\n \"acc_norm,none\": 0.5547945205479452,\n \"acc_norm_stderr,none\"\
: 0.04127264774457449,\n \"alias\": \" - leaderboard_bbh_penguins_in_a_table\"\
\n },\n \"leaderboard_bbh_reasoning_about_colored_objects\": {\n \"\
acc_norm,none\": 0.48,\n \"acc_norm_stderr,none\": 0.031660853408495185,\n\
\ \"alias\": \" - leaderboard_bbh_reasoning_about_colored_objects\"\n \
\ },\n \"leaderboard_bbh_ruin_names\": {\n \"acc_norm,none\": 0.564,\n\
\ \"acc_norm_stderr,none\": 0.03142556706028129,\n \"alias\": \" \
\ - leaderboard_bbh_ruin_names\"\n },\n \"leaderboard_bbh_salient_translation_error_detection\"\
: {\n \"acc_norm,none\": 0.4,\n \"acc_norm_stderr,none\": 0.031046021028253244,\n\
\ \"alias\": \" - leaderboard_bbh_salient_translation_error_detection\"\n\
\ },\n \"leaderboard_bbh_snarks\": {\n \"acc_norm,none\": 0.4606741573033708,\n\
\ \"acc_norm_stderr,none\": 0.0374658773638787,\n \"alias\": \" -\
\ leaderboard_bbh_snarks\"\n },\n \"leaderboard_bbh_sports_understanding\"\
: {\n \"acc_norm,none\": 0.808,\n \"acc_norm_stderr,none\": 0.024960691989171998,\n\
\ \"alias\": \" - leaderboard_bbh_sports_understanding\"\n },\n \"\
leaderboard_bbh_temporal_sequences\": {\n \"acc_norm,none\": 0.24,\n \
\ \"acc_norm_stderr,none\": 0.027065293652239003,\n \"alias\": \" - leaderboard_bbh_temporal_sequences\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_five_objects\": {\n \
\ \"acc_norm,none\": 0.136,\n \"acc_norm_stderr,none\": 0.02172334261705208,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_five_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_seven_objects\": {\n \
\ \"acc_norm,none\": 0.132,\n \"acc_norm_stderr,none\": 0.021450980824038096,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_seven_objects\"\
\n },\n \"leaderboard_bbh_tracking_shuffled_objects_three_objects\": {\n \
\ \"acc_norm,none\": 0.28,\n \"acc_norm_stderr,none\": 0.028454148277832318,\n\
\ \"alias\": \" - leaderboard_bbh_tracking_shuffled_objects_three_objects\"\
\n },\n \"leaderboard_bbh_web_of_lies\": {\n \"acc_norm,none\": 0.548,\n\
\ \"acc_norm_stderr,none\": 0.03153986449255662,\n \"alias\": \" \
\ - leaderboard_bbh_web_of_lies\"\n },\n \"leaderboard_gpqa\": {\n \
\ \"acc_norm,none\": 0.3104026845637584,\n \"acc_norm_stderr,none\": 0.013409389307482606,\n\
\ \"alias\": \" - leaderboard_gpqa\"\n },\n \"leaderboard_gpqa_diamond\"\
: {\n \"acc_norm,none\": 0.3383838383838384,\n \"acc_norm_stderr,none\"\
: 0.03371124142626303,\n \"alias\": \" - leaderboard_gpqa_diamond\"\n \
\ },\n \"leaderboard_gpqa_extended\": {\n \"acc_norm,none\": 0.2948717948717949,\n\
\ \"acc_norm_stderr,none\": 0.01953225605335248,\n \"alias\": \" \
\ - leaderboard_gpqa_extended\"\n },\n \"leaderboard_gpqa_main\": {\n \
\ \"acc_norm,none\": 0.3169642857142857,\n \"acc_norm_stderr,none\": 0.022007621584824805,\n\
\ \"alias\": \" - leaderboard_gpqa_main\"\n },\n \"leaderboard_ifeval\"\
: {\n \"prompt_level_strict_acc,none\": 0.2476894639556377,\n \"prompt_level_strict_acc_stderr,none\"\
: 0.018576139285185186,\n \"inst_level_strict_acc,none\": 0.381294964028777,\n\
\ \"inst_level_strict_acc_stderr,none\": \"N/A\",\n \"prompt_level_loose_acc,none\"\
: 0.2902033271719039,\n \"prompt_level_loose_acc_stderr,none\": 0.01953085669122253,\n\
\ \"inst_level_loose_acc,none\": 0.42685851318944845,\n \"inst_level_loose_acc_stderr,none\"\
: \"N/A\",\n \"alias\": \" - leaderboard_ifeval\"\n },\n \"leaderboard_math_hard\"\
: {\n \"exact_match,none\": 0.20694864048338368,\n \"exact_match_stderr,none\"\
: 0.010320240264084115,\n \"alias\": \" - leaderboard_math_hard\"\n },\n\
\ \"leaderboard_math_algebra_hard\": {\n \"exact_match,none\": 0.4201954397394137,\n\
\ \"exact_match_stderr,none\": 0.028216670555899662,\n \"alias\":\
\ \" - leaderboard_math_algebra_hard\"\n },\n \"leaderboard_math_counting_and_prob_hard\"\
: {\n \"exact_match,none\": 0.13821138211382114,\n \"exact_match_stderr,none\"\
: 0.03124585165489032,\n \"alias\": \" - leaderboard_math_counting_and_prob_hard\"\
\n },\n \"leaderboard_math_geometry_hard\": {\n \"exact_match,none\"\
: 0.09090909090909091,\n \"exact_match_stderr,none\": 0.02511722563616079,\n\
\ \"alias\": \" - leaderboard_math_geometry_hard\"\n },\n \"leaderboard_math_intermediate_algebra_hard\"\
: {\n \"exact_match,none\": 0.04285714285714286,\n \"exact_match_stderr,none\"\
: 0.012125450612513616,\n \"alias\": \" - leaderboard_math_intermediate_algebra_hard\"\
\n },\n \"leaderboard_math_num_theory_hard\": {\n \"exact_match,none\"\
: 0.18181818181818182,\n \"exact_match_stderr,none\": 0.03118156093500157,\n\
\ \"alias\": \" - leaderboard_math_num_theory_hard\"\n },\n \"leaderboard_math_prealgebra_hard\"\
: {\n \"exact_match,none\": 0.35751295336787564,\n \"exact_match_stderr,none\"\
: 0.03458816042181005,\n \"alias\": \" - leaderboard_math_prealgebra_hard\"\
\n },\n \"leaderboard_math_precalculus_hard\": {\n \"exact_match,none\"\
: 0.05185185185185185,\n \"exact_match_stderr,none\": 0.019154368449050517,\n\
\ \"alias\": \" - leaderboard_math_precalculus_hard\"\n },\n \"leaderboard_mmlu_pro\"\
: {\n \"acc,none\": 0.3681848404255319,\n \"acc_stderr,none\": 0.00439720900920118,\n\
\ \"alias\": \" - leaderboard_mmlu_pro\"\n },\n \"leaderboard_musr\"\
: {\n \"acc_norm,none\": 0.45634920634920634,\n \"acc_norm_stderr,none\"\
: 0.017922847958440576,\n \"alias\": \" - leaderboard_musr\"\n },\n \
\ \"leaderboard_musr_murder_mysteries\": {\n \"acc_norm,none\": 0.56,\n\
\ \"acc_norm_stderr,none\": 0.03145724452223564,\n \"alias\": \" \
\ - leaderboard_musr_murder_mysteries\"\n },\n \"leaderboard_musr_object_placements\"\
: {\n \"acc_norm,none\": 0.44140625,\n \"acc_norm_stderr,none\": 0.031095474260005376,\n\
\ \"alias\": \" - leaderboard_musr_object_placements\"\n },\n \"leaderboard_musr_team_allocation\"\
: {\n \"acc_norm,none\": 0.368,\n \"acc_norm_stderr,none\": 0.030562070620993167,\n\
\ \"alias\": \" - leaderboard_musr_team_allocation\"\n }\n}\n```"
repo_url: https://huggingface.co/MaziyarPanahi/calme-2.5-qwen2-7b
leaderboard_url: ''
point_of_contact: ''
configs:
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_boolean_expressions
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_boolean_expressions_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_causal_judgement
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_causal_judgement_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_date_understanding
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_date_understanding_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_disambiguation_qa
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_disambiguation_qa_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_formal_fallacies
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_formal_fallacies_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_geometric_shapes
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_geometric_shapes_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_hyperbaton
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_hyperbaton_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_logical_deduction_five_objects
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_five_objects_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_logical_deduction_seven_objects
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_seven_objects_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_logical_deduction_three_objects
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_logical_deduction_three_objects_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_movie_recommendation
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_movie_recommendation_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_navigate
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_navigate_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_navigate_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_object_counting
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_object_counting_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_object_counting_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_penguins_in_a_table
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_penguins_in_a_table_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_reasoning_about_colored_objects
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_reasoning_about_colored_objects_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_ruin_names
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_ruin_names_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_salient_translation_error_detection
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_salient_translation_error_detection_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_snarks
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_snarks_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_snarks_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_sports_understanding
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_sports_understanding_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_temporal_sequences
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_temporal_sequences_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_tracking_shuffled_objects_five_objects
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_five_objects_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_tracking_shuffled_objects_seven_objects
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_seven_objects_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_tracking_shuffled_objects_three_objects
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_tracking_shuffled_objects_three_objects_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_web_of_lies
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_bbh_web_of_lies_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_gpqa_diamond
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_gpqa_diamond_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_diamond_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_gpqa_extended
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_gpqa_extended_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_extended_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_gpqa_main
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_gpqa_main_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_gpqa_main_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_ifeval
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_ifeval_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_ifeval_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_math_algebra_hard
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_math_algebra_hard_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_algebra_hard_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_math_counting_and_prob_hard
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_counting_and_prob_hard_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_math_geometry_hard
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_math_geometry_hard_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_geometry_hard_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_math_intermediate_algebra_hard
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_intermediate_algebra_hard_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_math_num_theory_hard
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_num_theory_hard_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_math_prealgebra_hard
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_prealgebra_hard_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_math_precalculus_hard
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_math_precalculus_hard_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_mmlu_pro
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_mmlu_pro_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_mmlu_pro_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_musr_murder_mysteries
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_murder_mysteries_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_musr_object_placements
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_musr_object_placements_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_object_placements_2024-09-29T19-54-06.744164.jsonl'
- config_name: MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_musr_team_allocation
data_files:
- split: 2024_09_29T19_54_06.744164
path:
- '**/samples_leaderboard_musr_team_allocation_2024-09-29T19-54-06.744164.jsonl'
- split: latest
path:
- '**/samples_leaderboard_musr_team_allocation_2024-09-29T19-54-06.744164.jsonl'
---
# Dataset Card for Evaluation run of MaziyarPanahi/calme-2.5-qwen2-7b
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [MaziyarPanahi/calme-2.5-qwen2-7b](https://huggingface.co/MaziyarPanahi/calme-2.5-qwen2-7b)
The dataset is composed of 38 configuration(s), each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run.
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset(
"open-llm-leaderboard/MaziyarPanahi__calme-2.5-qwen2-7b-details",
name="MaziyarPanahi__calme-2.5-qwen2-7b__leaderboard_bbh_boolean_expressions",
split="latest"
)
```
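To see all 38 task configurations before picking one, you can enumerate them with `get_dataset_config_names`, a standard helper in the `datasets` library (shown here as a sketch):

```python
from datasets import get_dataset_config_names

# List every per-task configuration in this details repository.
configs = get_dataset_config_names(
    "open-llm-leaderboard/MaziyarPanahi__calme-2.5-qwen2-7b-details"
)
print(len(configs))   # one config per evaluated task
print(configs[:3])    # e.g. the first few leaderboard_bbh_* configs
```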
## Latest results
These are the [latest results from run 2024-09-29T19-54-06.744164](https://huggingface.co/datasets/open-llm-leaderboard/MaziyarPanahi__calme-2.5-qwen2-7b-details/blob/main/MaziyarPanahi__calme-2.5-qwen2-7b/results_2024-09-29T19-54-06.744164.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"leaderboard": {
"acc,none": 0.3681848404255319,
"acc_stderr,none": 0.00439720900920118,
"exact_match,none": 0.20694864048338368,
"exact_match_stderr,none": 0.010320240264084115,
"prompt_level_loose_acc,none": 0.2902033271719039,
"prompt_level_loose_acc_stderr,none": 0.01953085669122253,
"inst_level_strict_acc,none": 0.381294964028777,
"inst_level_strict_acc_stderr,none": "N/A",
"acc_norm,none": 0.4563497211052017,
"acc_norm_stderr,none": 0.00532499454536872,
"inst_level_loose_acc,none": 0.42685851318944845,
"inst_level_loose_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.2476894639556377,
"prompt_level_strict_acc_stderr,none": 0.018576139285185186,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.4865474743968061,
"acc_norm_stderr,none": 0.006127359256794648,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"acc_norm,none": 0.82,
"acc_norm_stderr,none": 0.024346890650293523,
"alias": " - leaderboard_bbh_boolean_expressions"
},
"leaderboard_bbh_causal_judgement": {
"acc_norm,none": 0.6042780748663101,
"acc_norm_stderr,none": 0.03585560071592546,
"alias": " - leaderboard_bbh_causal_judgement"
},
"leaderboard_bbh_date_understanding": {
"acc_norm,none": 0.372,
"acc_norm_stderr,none": 0.03063032594455831,
"alias": " - leaderboard_bbh_date_understanding"
},
"leaderboard_bbh_disambiguation_qa": {
"acc_norm,none": 0.72,
"acc_norm_stderr,none": 0.02845414827783232,
"alias": " - leaderboard_bbh_disambiguation_qa"
},
"leaderboard_bbh_formal_fallacies": {
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725504,
"alias": " - leaderboard_bbh_formal_fallacies"
},
"leaderboard_bbh_geometric_shapes": {
"acc_norm,none": 0.304,
"acc_norm_stderr,none": 0.029150213374159673,
"alias": " - leaderboard_bbh_geometric_shapes"
},
"leaderboard_bbh_hyperbaton": {
"acc_norm,none": 0.6,
"acc_norm_stderr,none": 0.031046021028253244,
"alias": " - leaderboard_bbh_hyperbaton"
},
"leaderboard_bbh_logical_deduction_five_objects": {
"acc_norm,none": 0.46,
"acc_norm_stderr,none": 0.03158465389149899,
"alias": " - leaderboard_bbh_logical_deduction_five_objects"
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"acc_norm,none": 0.468,
"acc_norm_stderr,none": 0.031621252575725504,
"alias": " - leaderboard_bbh_logical_deduction_seven_objects"
},
"leaderboard_bbh_logical_deduction_three_objects": {
"acc_norm,none": 0.652,
"acc_norm_stderr,none": 0.030186568464511683,
"alias": " - leaderboard_bbh_logical_deduction_three_objects"
},
"leaderboard_bbh_movie_recommendation": {
"acc_norm,none": 0.648,
"acc_norm_stderr,none": 0.03026628805735993,
"alias": " - leaderboard_bbh_movie_recommendation"
},
"leaderboard_bbh_navigate": {
"acc_norm,none": 0.584,
"acc_norm_stderr,none": 0.031235856237014553,
"alias": " - leaderboard_bbh_navigate"
},
"leaderboard_bbh_object_counting": {
"acc_norm,none": 0.36,
"acc_norm_stderr,none": 0.030418764025174985,
"alias": " - leaderboard_bbh_object_counting"
},
"leaderboard_bbh_penguins_in_a_table": {
"acc_norm,none": 0.5547945205479452,
"acc_norm_stderr,none": 0.04127264774457449,
"alias": " - leaderboard_bbh_penguins_in_a_table"
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"acc_norm,none": 0.48,
"acc_norm_stderr,none": 0.031660853408495185,
"alias": " - leaderboard_bbh_reasoning_about_colored_objects"
},
"leaderboard_bbh_ruin_names": {
"acc_norm,none": 0.564,
"acc_norm_stderr,none": 0.03142556706028129,
"alias": " - leaderboard_bbh_ruin_names"
},
"leaderboard_bbh_salient_translation_error_detection": {
"acc_norm,none": 0.4,
"acc_norm_stderr,none": 0.031046021028253244,
"alias": " - leaderboard_bbh_salient_translation_error_detection"
},
"leaderboard_bbh_snarks": {
"acc_norm,none": 0.4606741573033708,
"acc_norm_stderr,none": 0.0374658773638787,
"alias": " - leaderboard_bbh_snarks"
},
"leaderboard_bbh_sports_understanding": {
"acc_norm,none": 0.808,
"acc_norm_stderr,none": 0.024960691989171998,
"alias": " - leaderboard_bbh_sports_understanding"
},
"leaderboard_bbh_temporal_sequences": {
"acc_norm,none": 0.24,
"acc_norm_stderr,none": 0.027065293652239003,
"alias": " - leaderboard_bbh_temporal_sequences"
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"acc_norm,none": 0.136,
"acc_norm_stderr,none": 0.02172334261705208,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"acc_norm,none": 0.132,
"acc_norm_stderr,none": 0.021450980824038096,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"acc_norm,none": 0.28,
"acc_norm_stderr,none": 0.028454148277832318,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects"
},
"leaderboard_bbh_web_of_lies": {
"acc_norm,none": 0.548,
"acc_norm_stderr,none": 0.03153986449255662,
"alias": " - leaderboard_bbh_web_of_lies"
},
"leaderboard_gpqa": {
"acc_norm,none": 0.3104026845637584,
"acc_norm_stderr,none": 0.013409389307482606,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"acc_norm,none": 0.3383838383838384,
"acc_norm_stderr,none": 0.03371124142626303,
"alias": " - leaderboard_gpqa_diamond"
},
"leaderboard_gpqa_extended": {
"acc_norm,none": 0.2948717948717949,
"acc_norm_stderr,none": 0.01953225605335248,
"alias": " - leaderboard_gpqa_extended"
},
"leaderboard_gpqa_main": {
"acc_norm,none": 0.3169642857142857,
"acc_norm_stderr,none": 0.022007621584824805,
"alias": " - leaderboard_gpqa_main"
},
"leaderboard_ifeval": {
"prompt_level_strict_acc,none": 0.2476894639556377,
"prompt_level_strict_acc_stderr,none": 0.018576139285185186,
"inst_level_strict_acc,none": 0.381294964028777,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.2902033271719039,
"prompt_level_loose_acc_stderr,none": 0.01953085669122253,
"inst_level_loose_acc,none": 0.42685851318944845,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": " - leaderboard_ifeval"
},
"leaderboard_math_hard": {
"exact_match,none": 0.20694864048338368,
"exact_match_stderr,none": 0.010320240264084115,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"exact_match,none": 0.4201954397394137,
"exact_match_stderr,none": 0.028216670555899662,
"alias": " - leaderboard_math_algebra_hard"
},
"leaderboard_math_counting_and_prob_hard": {
"exact_match,none": 0.13821138211382114,
"exact_match_stderr,none": 0.03124585165489032,
"alias": " - leaderboard_math_counting_and_prob_hard"
},
"leaderboard_math_geometry_hard": {
"exact_match,none": 0.09090909090909091,
"exact_match_stderr,none": 0.02511722563616079,
"alias": " - leaderboard_math_geometry_hard"
},
"leaderboard_math_intermediate_algebra_hard": {
"exact_match,none": 0.04285714285714286,
"exact_match_stderr,none": 0.012125450612513616,
"alias": " - leaderboard_math_intermediate_algebra_hard"
},
"leaderboard_math_num_theory_hard": {
"exact_match,none": 0.18181818181818182,
"exact_match_stderr,none": 0.03118156093500157,
"alias": " - leaderboard_math_num_theory_hard"
},
"leaderboard_math_prealgebra_hard": {
"exact_match,none": 0.35751295336787564,
"exact_match_stderr,none": 0.03458816042181005,
"alias": " - leaderboard_math_prealgebra_hard"
},
"leaderboard_math_precalculus_hard": {
"exact_match,none": 0.05185185185185185,
"exact_match_stderr,none": 0.019154368449050517,
"alias": " - leaderboard_math_precalculus_hard"
},
"leaderboard_mmlu_pro": {
"acc,none": 0.3681848404255319,
"acc_stderr,none": 0.00439720900920118,
"alias": " - leaderboard_mmlu_pro"
},
"leaderboard_musr": {
"acc_norm,none": 0.45634920634920634,
"acc_norm_stderr,none": 0.017922847958440576,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"acc_norm,none": 0.56,
"acc_norm_stderr,none": 0.03145724452223564,
"alias": " - leaderboard_musr_murder_mysteries"
},
"leaderboard_musr_object_placements": {
"acc_norm,none": 0.44140625,
"acc_norm_stderr,none": 0.031095474260005376,
"alias": " - leaderboard_musr_object_placements"
},
"leaderboard_musr_team_allocation": {
"acc_norm,none": 0.368,
"acc_norm_stderr,none": 0.030562070620993167,
"alias": " - leaderboard_musr_team_allocation"
}
},
"leaderboard": {
"acc,none": 0.3681848404255319,
"acc_stderr,none": 0.00439720900920118,
"exact_match,none": 0.20694864048338368,
"exact_match_stderr,none": 0.010320240264084115,
"prompt_level_loose_acc,none": 0.2902033271719039,
"prompt_level_loose_acc_stderr,none": 0.01953085669122253,
"inst_level_strict_acc,none": 0.381294964028777,
"inst_level_strict_acc_stderr,none": "N/A",
"acc_norm,none": 0.4563497211052017,
"acc_norm_stderr,none": 0.00532499454536872,
"inst_level_loose_acc,none": 0.42685851318944845,
"inst_level_loose_acc_stderr,none": "N/A",
"prompt_level_strict_acc,none": 0.2476894639556377,
"prompt_level_strict_acc_stderr,none": 0.018576139285185186,
"alias": "leaderboard"
},
"leaderboard_bbh": {
"acc_norm,none": 0.4865474743968061,
"acc_norm_stderr,none": 0.006127359256794648,
"alias": " - leaderboard_bbh"
},
"leaderboard_bbh_boolean_expressions": {
"acc_norm,none": 0.82,
"acc_norm_stderr,none": 0.024346890650293523,
"alias": " - leaderboard_bbh_boolean_expressions"
},
"leaderboard_bbh_causal_judgement": {
"acc_norm,none": 0.6042780748663101,
"acc_norm_stderr,none": 0.03585560071592546,
"alias": " - leaderboard_bbh_causal_judgement"
},
"leaderboard_bbh_date_understanding": {
"acc_norm,none": 0.372,
"acc_norm_stderr,none": 0.03063032594455831,
"alias": " - leaderboard_bbh_date_understanding"
},
"leaderboard_bbh_disambiguation_qa": {
"acc_norm,none": 0.72,
"acc_norm_stderr,none": 0.02845414827783232,
"alias": " - leaderboard_bbh_disambiguation_qa"
},
"leaderboard_bbh_formal_fallacies": {
"acc_norm,none": 0.532,
"acc_norm_stderr,none": 0.031621252575725504,
"alias": " - leaderboard_bbh_formal_fallacies"
},
"leaderboard_bbh_geometric_shapes": {
"acc_norm,none": 0.304,
"acc_norm_stderr,none": 0.029150213374159673,
"alias": " - leaderboard_bbh_geometric_shapes"
},
"leaderboard_bbh_hyperbaton": {
"acc_norm,none": 0.6,
"acc_norm_stderr,none": 0.031046021028253244,
"alias": " - leaderboard_bbh_hyperbaton"
},
"leaderboard_bbh_logical_deduction_five_objects": {
"acc_norm,none": 0.46,
"acc_norm_stderr,none": 0.03158465389149899,
"alias": " - leaderboard_bbh_logical_deduction_five_objects"
},
"leaderboard_bbh_logical_deduction_seven_objects": {
"acc_norm,none": 0.468,
"acc_norm_stderr,none": 0.031621252575725504,
"alias": " - leaderboard_bbh_logical_deduction_seven_objects"
},
"leaderboard_bbh_logical_deduction_three_objects": {
"acc_norm,none": 0.652,
"acc_norm_stderr,none": 0.030186568464511683,
"alias": " - leaderboard_bbh_logical_deduction_three_objects"
},
"leaderboard_bbh_movie_recommendation": {
"acc_norm,none": 0.648,
"acc_norm_stderr,none": 0.03026628805735993,
"alias": " - leaderboard_bbh_movie_recommendation"
},
"leaderboard_bbh_navigate": {
"acc_norm,none": 0.584,
"acc_norm_stderr,none": 0.031235856237014553,
"alias": " - leaderboard_bbh_navigate"
},
"leaderboard_bbh_object_counting": {
"acc_norm,none": 0.36,
"acc_norm_stderr,none": 0.030418764025174985,
"alias": " - leaderboard_bbh_object_counting"
},
"leaderboard_bbh_penguins_in_a_table": {
"acc_norm,none": 0.5547945205479452,
"acc_norm_stderr,none": 0.04127264774457449,
"alias": " - leaderboard_bbh_penguins_in_a_table"
},
"leaderboard_bbh_reasoning_about_colored_objects": {
"acc_norm,none": 0.48,
"acc_norm_stderr,none": 0.031660853408495185,
"alias": " - leaderboard_bbh_reasoning_about_colored_objects"
},
"leaderboard_bbh_ruin_names": {
"acc_norm,none": 0.564,
"acc_norm_stderr,none": 0.03142556706028129,
"alias": " - leaderboard_bbh_ruin_names"
},
"leaderboard_bbh_salient_translation_error_detection": {
"acc_norm,none": 0.4,
"acc_norm_stderr,none": 0.031046021028253244,
"alias": " - leaderboard_bbh_salient_translation_error_detection"
},
"leaderboard_bbh_snarks": {
"acc_norm,none": 0.4606741573033708,
"acc_norm_stderr,none": 0.0374658773638787,
"alias": " - leaderboard_bbh_snarks"
},
"leaderboard_bbh_sports_understanding": {
"acc_norm,none": 0.808,
"acc_norm_stderr,none": 0.024960691989171998,
"alias": " - leaderboard_bbh_sports_understanding"
},
"leaderboard_bbh_temporal_sequences": {
"acc_norm,none": 0.24,
"acc_norm_stderr,none": 0.027065293652239003,
"alias": " - leaderboard_bbh_temporal_sequences"
},
"leaderboard_bbh_tracking_shuffled_objects_five_objects": {
"acc_norm,none": 0.136,
"acc_norm_stderr,none": 0.02172334261705208,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_five_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_seven_objects": {
"acc_norm,none": 0.132,
"acc_norm_stderr,none": 0.021450980824038096,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_seven_objects"
},
"leaderboard_bbh_tracking_shuffled_objects_three_objects": {
"acc_norm,none": 0.28,
"acc_norm_stderr,none": 0.028454148277832318,
"alias": " - leaderboard_bbh_tracking_shuffled_objects_three_objects"
},
"leaderboard_bbh_web_of_lies": {
"acc_norm,none": 0.548,
"acc_norm_stderr,none": 0.03153986449255662,
"alias": " - leaderboard_bbh_web_of_lies"
},
"leaderboard_gpqa": {
"acc_norm,none": 0.3104026845637584,
"acc_norm_stderr,none": 0.013409389307482606,
"alias": " - leaderboard_gpqa"
},
"leaderboard_gpqa_diamond": {
"acc_norm,none": 0.3383838383838384,
"acc_norm_stderr,none": 0.03371124142626303,
"alias": " - leaderboard_gpqa_diamond"
},
"leaderboard_gpqa_extended": {
"acc_norm,none": 0.2948717948717949,
"acc_norm_stderr,none": 0.01953225605335248,
"alias": " - leaderboard_gpqa_extended"
},
"leaderboard_gpqa_main": {
"acc_norm,none": 0.3169642857142857,
"acc_norm_stderr,none": 0.022007621584824805,
"alias": " - leaderboard_gpqa_main"
},
"leaderboard_ifeval": {
"prompt_level_strict_acc,none": 0.2476894639556377,
"prompt_level_strict_acc_stderr,none": 0.018576139285185186,
"inst_level_strict_acc,none": 0.381294964028777,
"inst_level_strict_acc_stderr,none": "N/A",
"prompt_level_loose_acc,none": 0.2902033271719039,
"prompt_level_loose_acc_stderr,none": 0.01953085669122253,
"inst_level_loose_acc,none": 0.42685851318944845,
"inst_level_loose_acc_stderr,none": "N/A",
"alias": " - leaderboard_ifeval"
},
"leaderboard_math_hard": {
"exact_match,none": 0.20694864048338368,
"exact_match_stderr,none": 0.010320240264084115,
"alias": " - leaderboard_math_hard"
},
"leaderboard_math_algebra_hard": {
"exact_match,none": 0.4201954397394137,
"exact_match_stderr,none": 0.028216670555899662,
"alias": " - leaderboard_math_algebra_hard"
},
"leaderboard_math_counting_and_prob_hard": {
"exact_match,none": 0.13821138211382114,
"exact_match_stderr,none": 0.03124585165489032,
"alias": " - leaderboard_math_counting_and_prob_hard"
},
"leaderboard_math_geometry_hard": {
"exact_match,none": 0.09090909090909091,
"exact_match_stderr,none": 0.02511722563616079,
"alias": " - leaderboard_math_geometry_hard"
},
"leaderboard_math_intermediate_algebra_hard": {
"exact_match,none": 0.04285714285714286,
"exact_match_stderr,none": 0.012125450612513616,
"alias": " - leaderboard_math_intermediate_algebra_hard"
},
"leaderboard_math_num_theory_hard": {
"exact_match,none": 0.18181818181818182,
"exact_match_stderr,none": 0.03118156093500157,
"alias": " - leaderboard_math_num_theory_hard"
},
"leaderboard_math_prealgebra_hard": {
"exact_match,none": 0.35751295336787564,
"exact_match_stderr,none": 0.03458816042181005,
"alias": " - leaderboard_math_prealgebra_hard"
},
"leaderboard_math_precalculus_hard": {
"exact_match,none": 0.05185185185185185,
"exact_match_stderr,none": 0.019154368449050517,
"alias": " - leaderboard_math_precalculus_hard"
},
"leaderboard_mmlu_pro": {
"acc,none": 0.3681848404255319,
"acc_stderr,none": 0.00439720900920118,
"alias": " - leaderboard_mmlu_pro"
},
"leaderboard_musr": {
"acc_norm,none": 0.45634920634920634,
"acc_norm_stderr,none": 0.017922847958440576,
"alias": " - leaderboard_musr"
},
"leaderboard_musr_murder_mysteries": {
"acc_norm,none": 0.56,
"acc_norm_stderr,none": 0.03145724452223564,
"alias": " - leaderboard_musr_murder_mysteries"
},
"leaderboard_musr_object_placements": {
"acc_norm,none": 0.44140625,
"acc_norm_stderr,none": 0.031095474260005376,
"alias": " - leaderboard_musr_object_placements"
},
"leaderboard_musr_team_allocation": {
"acc_norm,none": 0.368,
"acc_norm_stderr,none": 0.030562070620993167,
"alias": " - leaderboard_musr_team_allocation"
}
}
```
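Once the results file above has been parsed (for instance with `json.load`), individual metrics can be read straight out of the nested dictionary. A small sketch, assuming the JSON shown above is held in a variable named `results`:

```python
# Sketch: pulling single metrics out of the parsed results dict.
# `results` is assumed to hold the JSON above (e.g. via json.load).
bbh = results["all"]["leaderboard_bbh"]
print(bbh["acc_norm,none"])          # 0.4865... (aggregate BBH accuracy)
print(bbh["acc_norm_stderr,none"])   # 0.0061... (its standard error)

# IFEval aggregates live under "leaderboard_ifeval":
ifeval = results["all"]["leaderboard_ifeval"]
print(ifeval["prompt_level_strict_acc,none"])  # 0.2476...
```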
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
itaylotan/previous_cycle_flow_artifacts__recreation_test__dataset | itaylotan | "2024-09-29T20:00:50Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T20:00:45Z" | ---
dataset_info:
features:
- name: id
dtype: int64
- name: consult_input
dtype: string
- name: turing_output
dtype: string
- name: patient_info
dtype: string
- name: hospital_info
dtype: string
- name: case_info
dtype: string
- name: text_question
dtype: string
- name: difficulty_tags
dtype: string
- name: question_tag
dtype: string
- name: signals_present
dtype: string
- name: signals_not_present
dtype: string
- name: signals_present_id
dtype: string
- name: signals_not_present_id
dtype: string
- name: diagnosis_present
dtype: string
- name: diagnosis_not_present
dtype: string
- name: diagnosis_present_id
dtype: string
- name: diagnosis_not_present_id
dtype: string
- name: recommendations_present
dtype: string
- name: recommendations_not_present
dtype: string
- name: recommendations_present_id
dtype: string
- name: recommendations_not_present_id
dtype: string
splits:
- name: train
num_bytes: 133241595
num_examples: 9000
- name: test
num_bytes: 14850635
num_examples: 1000
download_size: 32164893
dataset_size: 148092230
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
|
igandhi/20_times_dataset_picture_Hr_split | igandhi | "2024-09-29T20:05:17Z" | 0 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T20:05:14Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': train
'1': val
splits:
- name: train
num_bytes: 15228759.0
num_examples: 400
- name: validation
num_bytes: 3220647.0
num_examples: 85
download_size: 18455477
dataset_size: 18449406.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
---
|
unfake/portufake | unfake | "2024-09-29T20:12:43Z" | 0 | 0 | [
"license:mit",
"region:us"
] | null | "2024-09-29T20:12:43Z" | ---
license: mit
---
|
aircrypto/English-French-Translations-Train-Large | aircrypto | "2024-09-29T20:15:40Z" | 0 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T20:15:39Z" | ---
dataset_info:
features:
- name: english
dtype: string
- name: label
dtype: int64
- name: french
dtype: string
splits:
- name: train
num_bytes: 2170453
num_examples: 8000
download_size: 1478247
dataset_size: 2170453
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
harshana95/center_color_psfs_02s0db_synthetic_Flickr2k_cropped_PCA_interp_file | harshana95 | "2024-09-29T21:03:25Z" | 0 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T20:19:06Z" | ---
dataset_info:
features:
- name: gt
dtype: image
- name: blur
dtype: image
splits:
- name: train
num_bytes: 7515219917.32
num_examples: 3020
- name: validation
num_bytes: 73036601.0
num_examples: 31
download_size: 6547196260
dataset_size: 7588256518.32
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
---
|
optimal-sampling/qwen-1.5-32B-K-100-test | optimal-sampling | "2024-09-29T20:21:37Z" | 0 | 0 | [
"size_categories:100K<n<1M",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T20:21:27Z" | ---
dataset_info:
features:
- name: problem
dtype: string
- name: level
dtype: string
- name: type
dtype: string
- name: solution
dtype: string
- name: prompt
dtype: string
- name: inference_id
dtype: int64
- name: response
dtype: string
splits:
- name: train
num_bytes: 1246259886
num_examples: 500000
download_size: 241676680
dataset_size: 1246259886
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
eggie5-adyen/test-ds | eggie5-adyen | "2024-09-26T12:54:52Z" | 0 | 0 | [
"language:en",
"region:us"
] | null | "2024-09-29T20:23:19Z" | ---
language:
- en
pretty_name: Adyen Data Agents Benchmark Dataset
dataset_info:
- config_name: answers
features:
- name: task_id
dtype: int64
- name: level
dtype: string
- name: answer
sequence: string
- name: annotator_metadata
dtype: string
- name: file_ids
sequence: int64
splits:
- name: validation
num_bytes: 6915
num_examples: 71
- name: test
num_bytes: 12309
num_examples: 6
download_size: 23417
dataset_size: 19224
- config_name: context_files
features:
- name: file_path
dtype: string
- name: file_type
dtype: string
splits:
- name: default
num_bytes: 713
num_examples: 7
download_size: 27623566
dataset_size: 713
- config_name: questions
features:
- name: task_id
dtype: int64
- name: question
sequence: string
- name: level
dtype: string
- name: file_ids
sequence: int64
splits:
- name: validation
num_bytes: 7077
num_examples: 71
- name: test
num_bytes: 2933
num_examples: 6
download_size: 12450
dataset_size: 10010
- config_name: submissions
features:
- name: agent_name
dtype: string
- name: task_id
dtype: int64
- name: agent_answer
sequence: string
- name: prompt
sequence: string
- name: steps
list:
list:
- name: content
dtype: string
- name: role
dtype: string
- name: tool_calls
list:
- name: function
struct:
- name: arguments
struct:
- name: code
dtype: string
- name: name
dtype: string
- name: type
dtype: string
splits:
- name: validation
num_bytes: 121308
num_examples: 71
- name: test
num_bytes: 28093
num_examples: 6
download_size: 181685
dataset_size: 149401
- config_name: system_scores
features:
- name: agent_name
dtype: string
- name: level
dtype: string
- name: score
dtype: float64
splits:
- name: validation
num_bytes: 77
num_examples: 3
- name: test
num_bytes: 77
num_examples: 3
download_size: 207
dataset_size: 154
- config_name: task_scores
features:
- name: agent_name
dtype: string
- name: task_id
dtype: int64
- name: agent_answer
sequence: string
- name: gold_answer
sequence: string
- name: partial_score
sequence: bool
- name: score
dtype: float64
- name: level
dtype: string
splits:
- name: validation
num_bytes: 12536
num_examples: 71
- name: test
num_bytes: 2737
num_examples: 6
download_size: 20367
dataset_size: 15273
---
# Adyen Data Agents Benchmark Dataset
## Data and leaderboard
|
HuggingFaceH4/10k_prompts_ranked | HuggingFaceH4 | "2024-09-29T21:17:22Z" | 0 | 0 | [
"size_categories:10K<n<100K",
"format:parquet",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T20:49:56Z" | ---
dataset_info:
features:
- name: prompt
list:
- name: content
dtype: string
- name: role
dtype: string
- name: quality
list:
- name: user_id
dtype: string
id: question
- name: value
dtype: string
id: suggestion
- name: status
dtype: string
id: question
- name: metadata
dtype: string
id: metadata
- name: avg_rating
dtype: float64
- name: num_responses
dtype: int64
- name: agreement_ratio
dtype: float64
- name: raw_responses
sequence: int64
- name: kind
dtype: string
- name: cluster_description
dtype: string
- name: topic
dtype: string
- name: messages
list:
- name: content
dtype: string
- name: role
dtype: string
splits:
- name: train
num_bytes: 14700752.673603717
num_examples: 10231
- name: test
num_bytes: 143688.32639628302
num_examples: 100
download_size: 6938601
dataset_size: 14844441.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
|
joelniklaus/SwissDecisionSummaryTranslations | joelniklaus | "2024-09-29T20:57:10Z" | 0 | 0 | [
"task_categories:translation",
"language:de",
"language:fr",
"language:it",
"size_categories:10K<n<100K",
"format:json",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us",
"legal"
] | [
"translation"
] | "2024-09-29T20:53:17Z" | ---
configs:
- config_name: bge_level
data_files:
- split: train
path: bge_level/train.jsonl.gz
- split: validation
path: bge_level/val.jsonl.gz
- split: test
path: bge_level/test.jsonl.gz
- config_name: regeste_level
data_files:
- split: train
path: regeste_level/train.jsonl.gz
- split: validation
path: regeste_level/val.jsonl.gz
- split: test
path: regeste_level/test.jsonl.gz
- config_name: text_level
data_files:
- split: train
path: text_level/train.jsonl.gz
- split: validation
path: text_level/val.jsonl.gz
- split: test
path: text_level/test.jsonl.gz
task_categories:
- translation
language:
- de
- fr
- it
tags:
- legal
pretty_name: Swiss Decision Summary Translations
size_categories:
- 10K<n<100K
---
# Dataset Card for Swiss Decision Summary Translations
This dataset card describes a dataset of summaries of leading Swiss court decisions (also known as "BGE" or "Bundesgerichtsentscheide") from the Swiss Supreme Court: [https://bger.ch](https://bger.ch). It is a parallel dataset offering translations at three levels of granularity: the entire summary (bge_level), the regeste level (regeste_level), and the text level (text_level).
## Dataset Details
### Dataset Description
This dataset consists of summaries of leading Swiss Supreme Court decisions. These summaries are provided in three languages (German, French, Italian) and are available at three distinct levels of granularity:
- **bge_level**: The entire summary of the court decision.
- **regeste_level**: A summary focused on the core legal issue.
- **text_level**: A further detailed extraction of specific legal statements.
The dataset can be used for various natural language processing tasks such as multilingual text alignment, machine translation, and legal text analysis.
- **Curated by:** Joel Niklaus
- **Funded by [optional]:** Swiss Federal Supreme Court
- **Shared by [optional]:** Joel Niklaus
- **Language(s) (NLP):** German (de), French (fr), Italian (it)
- **License:** [More Information Needed]
### Dataset Sources [optional]
- **Repository:** https://github.com/JoelNiklaus/SwissLegalTranslations
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
### Direct Use
This dataset can be used for:
- Multilingual text alignment and translation tasks between German, French, and Italian.
- Legal text analysis and summarization, with a focus on Swiss law.
- Language model training or fine-tuning on legal domain-specific text.
### Out-of-Scope Use
The dataset is not suitable for tasks unrelated to legal text processing or without consideration for the specific legal context of Swiss Supreme Court decisions.
## Dataset Structure
The dataset is structured into three levels:
1. **bge_level**: Contains full summaries of court decisions in German, French, and Italian.
- Fields:
- `bge`: Case identifier.
- `year`: Year of the decision.
- `volume`: Volume number of the decision.
- `pageNumber`: Page number of the decision.
- `de_bgeText`: Full summary in German.
- `fr_bgeText`: Full summary in French.
- `it_bgeText`: Full summary in Italian.
2. **regeste_level**: Contains regeste summaries (core legal issues) in the three languages.
- Fields:
- `bge`: Case identifier.
- `year`: Year of the decision.
- `volume`: Volume number of the decision.
- `pageNumber`: Page number of the decision.
- `regesteNumber`: Number assigned to the regeste.
- `de_regesteTitle`: Regeste title in German.
- `fr_regesteTitle`: Regeste title in French.
- `it_regesteTitle`: Regeste title in Italian.
- `de_regesteText`: Regeste text in German.
- `fr_regesteText`: Regeste text in French.
- `it_regesteText`: Regeste text in Italian.
3. **text_level**: Contains more detailed text extracts from the decisions, available in all three languages.
- Fields:
- `bge`: Case identifier.
- `year`: Year of the decision.
- `volume`: Volume number of the decision.
- `pageNumber`: Page number of the decision.
- `regesteNumber`: Number assigned to the regeste.
- `textNumber`: Number assigned to the specific text extract.
- `de_text`: Text extract in German.
- `fr_text`: Text extract in French.
- `it_text`: Text extract in Italian.
### Example Code
```python
from datasets import load_dataset
# Load the datasets for each level
bge_dataset = load_dataset('joelniklaus/SwissDecisionSummaryTranslations', name='bge_level', trust_remote_code=True)
regeste_dataset = load_dataset('joelniklaus/SwissDecisionSummaryTranslations', name='regeste_level', trust_remote_code=True)
text_dataset = load_dataset('joelniklaus/SwissDecisionSummaryTranslations', name='text_level', trust_remote_code=True)
# Print datasets for inspection
print(bge_dataset, regeste_dataset, text_dataset)
```
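As a concrete follow-up, the aligned language columns can be paired off for translation work. The sketch below reuses the `bge_dataset` object from the snippet above and the field names documented under Dataset Structure to build German→French pairs from the full summaries:

```python
# Build German→French translation pairs from the bge_level training split.
# Field names follow the "Dataset Structure" section above.
train_split = bge_dataset["train"]

pairs = [
    {"source": row["de_bgeText"], "target": row["fr_bgeText"]}
    for row in train_split
    if row["de_bgeText"] and row["fr_bgeText"]  # skip rows missing a translation
]

print(f"{len(pairs)} aligned de→fr summary pairs")
print(pairs[0]["source"][:200])
```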
## Dataset Creation
### Curation Rationale
The dataset was created to enable the development of multilingual models for the legal domain, specifically focused on Swiss law. By providing translations at multiple levels of granularity, it supports a range of use cases from high-level summaries to detailed legal text analysis.
### Source Data
The dataset is derived from official Swiss Supreme Court decision summaries, which are publicly available on [bger.ch](https://bger.ch).
#### Data Collection and Processing
The summaries were extracted and processed from official court records, normalized across the three languages (German, French, Italian), and structured into the different levels (bge_level, regeste_level, text_level). The dataset was cleaned and formatted into JSONL format for ease of use.
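Since each level ships as gzipped JSONL (see the `configs` block in the front matter), the files can also be inspected without the `datasets` library. A minimal sketch, assuming the repository files are laid out locally as in the config:

```python
import gzip
import json

# Peek at the first few records of the bge_level training file; the path
# matches the data_files entry in the YAML front matter above.
with gzip.open("bge_level/train.jsonl.gz", "rt", encoding="utf-8") as f:
    for i, line in enumerate(f):
        record = json.loads(line)
        print(record["bge"], record["year"], record["de_bgeText"][:80])
        if i == 2:
            break
```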
#### Who are the source data producers?
The source data is produced by the Swiss Federal Supreme Court, which publishes leading court decisions in multiple languages (German, French, and Italian).
### Annotations [optional]
No additional annotations have been added beyond the original translations provided by the Swiss Supreme Court.
#### Personal and Sensitive Information
The dataset does not contain any personal, sensitive, or private information as it consists solely of legal summaries from public court decisions.
## Bias, Risks, and Limitations
The dataset may reflect biases inherent in the Swiss legal system and judicial processes. Users should be aware that the dataset only represents leading decisions from the Swiss Supreme Court, which may not reflect the entirety of Swiss law or legal perspectives.
### Recommendations
Users should consider the legal context and jurisdiction-specific nature of the dataset when applying models trained on this data. The dataset is best used for tasks related to legal language and multilingual processing in the legal domain.
## Citation [optional]
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
- **BGE**: Bundesgerichtsentscheide (leading court decisions from the Swiss Supreme Court).
- **Regeste**: A concise summary of the legal issue(s) in a court decision.
## More Information [optional]
[More Information Needed]
## Dataset Card Authors
Joel Niklaus
## Dataset Card Contact
[Joel Niklaus](mailto:joel@niklaus.ai)
|
kongzym/gsm8k_merge | kongzym | "2024-09-29T23:24:14Z" | 0 | 0 | [
"license:mit",
"size_categories:n<1K",
"format:json",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T21:03:20Z" | ---
license: mit
configs:
- config_name: default
data_files:
- split: test
path: "main/gsm8k_debug.json"
---
|
pdf2dataset/8862ce00de9483194499e7734d02630f | pdf2dataset | "2024-09-29T21:07:31Z" | 0 | 0 | [
"size_categories:n<1K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T21:07:30Z" | ---
dataset_info:
features:
- name: text
dtype: string
- name: source
dtype: string
splits:
- name: train
num_bytes: 275605
num_examples: 92
download_size: 106742
dataset_size: 275605
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
tukey/documents_letter_1000 | tukey | "2024-09-30T01:24:19Z" | 0 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T21:07:55Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: answer
dtype: string
splits:
- name: train
num_bytes: 211446671.0
num_examples: 1000
download_size: 210971552
dataset_size: 211446671.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Jonathan-Zhou/GameLabel-10k | Jonathan-Zhou | "2024-09-29T22:40:38Z" | 0 | 0 | [
"language:en",
"license:apache-2.0",
"size_categories:1K<n<10K",
"format:csv",
"modality:tabular",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T21:09:40Z" | ---
license: apache-2.0
language:
- en
size_categories:
- 1K<n<10K
---
This dataset was created in collaboration with the game developers of Armchair Commander. It contains 9800 human preferences over pairs of Flux-Schnell-generated images, covering over 6800 unique prompts. Image data is base64 encoded, and images with more votes are preferred over their counterparts. Paper coming soon!
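Since the images ship base64 encoded, they need to be decoded before viewing. A minimal sketch of the decoding step, assuming Pillow is installed; the column name `image_b64` is a hypothetical placeholder, not the dataset's actual schema:

```python
import base64
import io

from PIL import Image

def decode_image(b64_string: str) -> Image.Image:
    """Decode a base64-encoded image string into a PIL image."""
    raw = base64.b64decode(b64_string)
    return Image.open(io.BytesIO(raw))

# img = decode_image(row["image_b64"])  # "image_b64" is a hypothetical column name
# img.show()
```
|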
tukey/documents_scientific_publications_2000 | tukey | "2024-09-29T21:19:39Z" | 0 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T21:09:42Z" | ---
dataset_info:
features:
- name: image
dtype: image
splits:
- name: train
num_bytes: 786751102.681199
num_examples: 2000
download_size: 789461685
dataset_size: 786751102.681199
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
tukey/documents_scientific_report_2000 | tukey | "2024-09-29T21:28:34Z" | 0 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T21:12:31Z" | ---
dataset_info:
features:
- name: image
dtype: image
splits:
- name: train
num_bytes: 401651678.6004804
num_examples: 2000
download_size: 405516606
dataset_size: 401651678.6004804
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
tukey/documents_email_1000 | tukey | "2024-09-30T00:51:12Z" | 0 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T21:12:54Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: answer
dtype: string
splits:
- name: train
num_bytes: 100916189.0
num_examples: 1000
download_size: 99678202
dataset_size: 100916189.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
tukey/documents_news_article_2000 | tukey | "2024-09-29T21:18:43Z" | 0 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T21:13:17Z" | ---
dataset_info:
features:
- name: image
dtype: image
splits:
- name: train
num_bytes: 752552554.1015022
num_examples: 2000
download_size: 754240082
dataset_size: 752552554.1015022
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
jayavibhav/text2sql-cleaned | jayavibhav | "2024-09-29T21:21:44Z" | 0 | 0 | [
"size_categories:100K<n<1M",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T21:21:38Z" | ---
dataset_info:
features:
- name: source
dtype: string
- name: conversations
list:
- name: from
dtype: string
- name: value
dtype: string
splits:
- name: train
num_bytes: 328939238
num_examples: 262208
download_size: 83829798
dataset_size: 328939238
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
tukey/sampled-arxiv-ocr001_2000 | tukey | "2024-09-30T00:59:06Z" | 0 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T21:21:46Z" | ---
dataset_info:
features:
- name: id
dtype: string
- name: messages
list:
- name: content
dtype: string
- name: role
dtype: string
- name: images
list:
- name: bytes
dtype: binary
- name: path
dtype: 'null'
splits:
- name: train
num_bytes: 1629830361
num_examples: 2000
download_size: 1477730572
dataset_size: 1629830361
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
tukey/documents_handwritten_2000 | tukey | "2024-09-29T21:26:48Z" | 0 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:image",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T21:22:31Z" | ---
dataset_info:
features:
- name: image
dtype: image
- name: answer
dtype: string
splits:
- name: train
num_bytes: 463779958.9257504
num_examples: 2000
download_size: 456813078
dataset_size: 463779958.9257504
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
HuggingFaceTB/mcf-qa-prompt-explanation | HuggingFaceTB | "2024-09-29T21:28:17Z" | 0 | 0 | [
"size_categories:1K<n<10K",
"format:parquet",
"modality:text",
"library:datasets",
"library:pandas",
"library:mlcroissant",
"library:polars",
"region:us"
] | null | "2024-09-29T21:28:16Z" | ---
dataset_info:
features:
- name: topic
dtype: string
- name: subtopic
dtype: string
- name: subsubtopic
dtype: string
- name: full_topic
dtype: string
- name: prompt_everyday
dtype: string
- name: prompt_generic
dtype: string
- name: prompt_explanation
dtype: string
- name: llm_response
dtype: string
splits:
- name: train_sft
num_bytes: 7248720
num_examples: 2260
download_size: 1864577
dataset_size: 7248720
configs:
- config_name: default
data_files:
- split: train_sft
path: data/train_sft-*
---
|
Vipitis/Shadereval-results | Vipitis | "2024-09-29T21:40:34Z" | 0 | 0 | [
"license:apache-2.0",
"size_categories:n<1K",
"format:json",
"modality:text",
"library:datasets",
"library:dask",
"library:mlcroissant",
"region:us"
] | null | "2024-09-29T21:28:57Z" | ---
license: apache-2.0
---
## WIP eval results
Kinda motivated by the advice in https://huggingface.co/docs/leaderboards/leaderboards/building_page, but also doing some parts of it myself along the way.
The task can be found here: https://github.com/bigcode-project/bigcode-evaluation-harness/pull/173. The associated dataset is here: https://huggingface.co/datasets/Vipitis/Shadereval-experiments-dev
The metric (and eventually the leaderboard) will be here once updated: https://huggingface.co/spaces/Vipitis/shadermatch
Plenty of stuff still to figure out here. |