picocreator committed
Commit 4e35bff
1 Parent(s): 9e889b2

eval update

This view is limited to 50 files because it contains too many changes. See the raw diff for the full change set.
Files changed (50):
  1. lm-eval-output/RWKV/v5-Eagle-7B-HF/anli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +9 -9
  2. lm-eval-output/RWKV/v5-Eagle-7B-HF/anli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +2 -2
  3. lm-eval-output/SmerkyG/rwkv-5-world-1b5/ai2_arc/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +132 -0
  4. lm-eval-output/SmerkyG/rwkv-5-world-1b5/ai2_arc/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
  5. lm-eval-output/SmerkyG/rwkv-5-world-1b5/anli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +161 -0
  6. lm-eval-output/SmerkyG/rwkv-5-world-1b5/anli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
  7. lm-eval-output/SmerkyG/rwkv-5-world-1b5/blimp/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +2249 -0
  8. lm-eval-output/SmerkyG/rwkv-5-world-1b5/blimp/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
  9. lm-eval-output/SmerkyG/rwkv-5-world-1b5/cmmlu/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +0 -0
  10. lm-eval-output/SmerkyG/rwkv-5-world-1b5/cmmlu/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
  11. lm-eval-output/SmerkyG/rwkv-5-world-1b5/copa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +58 -0
  12. lm-eval-output/SmerkyG/rwkv-5-world-1b5/copa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
  13. lm-eval-output/SmerkyG/rwkv-5-world-1b5/glue/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +374 -0
  14. lm-eval-output/SmerkyG/rwkv-5-world-1b5/glue/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
  15. lm-eval-output/SmerkyG/rwkv-5-world-1b5/hellaswag/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +67 -0
  16. lm-eval-output/SmerkyG/rwkv-5-world-1b5/hellaswag/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
  17. lm-eval-output/SmerkyG/rwkv-5-world-1b5/lambada/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +126 -0
  18. lm-eval-output/SmerkyG/rwkv-5-world-1b5/lambada/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
  19. lm-eval-output/SmerkyG/rwkv-5-world-1b5/lambada_multilingual/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +252 -0
  20. lm-eval-output/SmerkyG/rwkv-5-world-1b5/lambada_multilingual/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
  21. lm-eval-output/SmerkyG/rwkv-5-world-1b5/mmlu/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +2594 -0
  22. lm-eval-output/SmerkyG/rwkv-5-world-1b5/mmlu/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
  23. lm-eval-output/SmerkyG/rwkv-5-world-1b5/openbookqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +66 -0
  24. lm-eval-output/SmerkyG/rwkv-5-world-1b5/openbookqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
  25. lm-eval-output/SmerkyG/rwkv-5-world-1b5/pawsx/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +283 -0
  26. lm-eval-output/SmerkyG/rwkv-5-world-1b5/pawsx/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
  27. lm-eval-output/SmerkyG/rwkv-5-world-1b5/piqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +64 -0
  28. lm-eval-output/SmerkyG/rwkv-5-world-1b5/piqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
  29. lm-eval-output/SmerkyG/rwkv-5-world-1b5/pythia/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +0 -0
  30. lm-eval-output/SmerkyG/rwkv-5-world-1b5/pythia/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
  31. lm-eval-output/SmerkyG/rwkv-5-world-1b5/record/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +67 -0
  32. lm-eval-output/SmerkyG/rwkv-5-world-1b5/record/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
  33. lm-eval-output/SmerkyG/rwkv-5-world-1b5/sciq/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +65 -0
  34. lm-eval-output/SmerkyG/rwkv-5-world-1b5/sciq/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
  35. lm-eval-output/SmerkyG/rwkv-5-world-1b5/winogrande/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +58 -0
  36. lm-eval-output/SmerkyG/rwkv-5-world-1b5/winogrande/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
  37. lm-eval-output/SmerkyG/rwkv-5-world-1b5/xcopa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +390 -0
  38. lm-eval-output/SmerkyG/rwkv-5-world-1b5/xcopa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
  39. lm-eval-output/SmerkyG/rwkv-5-world-1b5/xnli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +548 -0
  40. lm-eval-output/SmerkyG/rwkv-5-world-1b5/xnli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
  41. lm-eval-output/SmerkyG/rwkv-5-world-1b5/xstorycloze/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +423 -0
  42. lm-eval-output/SmerkyG/rwkv-5-world-1b5/xstorycloze/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
  43. lm-eval-output/SmerkyG/rwkv-5-world-1b5/xwinograd/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +248 -0
  44. lm-eval-output/SmerkyG/rwkv-5-world-1b5/xwinograd/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
  45. lm-eval-output/SmerkyG/rwkv-5-world-3b/ai2_arc/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +132 -0
  46. lm-eval-output/SmerkyG/rwkv-5-world-3b/ai2_arc/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
  47. lm-eval-output/SmerkyG/rwkv-5-world-3b/anli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +161 -0
  48. lm-eval-output/SmerkyG/rwkv-5-world-3b/anli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
  49. lm-eval-output/SmerkyG/rwkv-5-world-3b/blimp/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json +2249 -0
  50. lm-eval-output/SmerkyG/rwkv-5-world-3b/blimp/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log +3 -0
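
All 50 entries follow the same layout: `lm-eval-output/<org>/<model>/<task>/<run-settings>/` holding a `results.json` and a `taskrun.log`. Below is a minimal sketch of walking that tree to tabulate the headline group accuracies; the glob pattern and the `acc,none` metric key are taken from the listing and the files shown further down, everything else is illustrative:

```python
import json
from pathlib import Path

def collect_accuracies(root: str = "lm-eval-output"):
    """Yield (model, task, acc) from every results.json in the tree above."""
    for path in sorted(Path(root).glob("*/*/*/*/results.json")):
        org, model, task = path.parts[1], path.parts[2], path.parts[3]
        data = json.loads(path.read_text())
        # The group-level entry in "results" shares the task directory's name.
        acc = data["results"].get(task, {}).get("acc,none")
        yield f"{org}/{model}", task, acc

for model, task, acc in collect_accuracies():
    print(f"{model:35s} {task:22s} acc={acc}")
```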
lm-eval-output/RWKV/v5-Eagle-7B-HF/anli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json CHANGED
@@ -1,30 +1,30 @@
 {
   "results": {
     "anli": {
-      "acc,none": 0.359375,
-      "acc_stderr,none": 0.0176485793476215,
+      "acc,none": 0.3590625,
+      "acc_stderr,none": 0.017704453505961653,
       "alias": "anli"
     },
     "anli_r1": {
       "acc,none": 0.38,
-      "acc_stderr,none": 0.015356947477797573,
+      "acc_stderr,none": 0.015356947477797577,
       "alias": " - anli_r1"
     },
     "anli_r2": {
       "acc,none": 0.345,
-      "acc_stderr,none": 0.015039986742055235,
+      "acc_stderr,none": 0.015039986742055237,
       "alias": " - anli_r2"
     },
     "anli_r3": {
-      "acc,none": 0.3541666666666667,
-      "acc_stderr,none": 0.01381193349957096,
+      "acc,none": 0.35333333333333333,
+      "acc_stderr,none": 0.013804572162314933,
       "alias": " - anli_r3"
     }
   },
   "groups": {
     "anli": {
-      "acc,none": 0.359375,
-      "acc_stderr,none": 0.0176485793476215,
+      "acc,none": 0.3590625,
+      "acc_stderr,none": 0.017704453505961653,
       "alias": "anli"
     }
   },
@@ -157,5 +157,5 @@
     "bootstrap_iters": 100000,
     "gen_kwargs": null
   },
-  "git_hash": "71d574c"
+  "git_hash": "1ee41f7"
 }
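
The substantive change above is a re-run of the ANLI group under a newer harness commit (`git_hash` 71d574c → 1ee41f7), which nudged the aggregate `acc,none` from 0.359375 to 0.3590625 and shifted the per-round stderr values slightly. A small sketch of surfacing such metric drift between two revisions of a `results.json`; the file names are hypothetical:

```python
import json

def diff_results(old_path: str, new_path: str):
    """Print every metric in the "results" block whose value changed."""
    with open(old_path) as f:
        old = json.load(f)["results"]
    with open(new_path) as f:
        new = json.load(f)["results"]
    for task in sorted(set(old) | set(new)):
        keys = set(old.get(task, {})) | set(new.get(task, {}))
        for key in sorted(keys - {"alias"}):
            a = old.get(task, {}).get(key)
            b = new.get(task, {}).get(key)
            if a != b:
                print(f"{task} {key}: {a} -> {b}")

# Hypothetical names for the two revisions shown in the diff above.
diff_results("results.before.json", "results.after.json")
```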
lm-eval-output/RWKV/v5-Eagle-7B-HF/anli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d161c0f981d172f60e730ec0392d59bc6164bc95b6e30b5acaf60f3fdd3b433f
-size 79965
+oid sha256:ef7c16a50e1dd8570ebfebb583f105c944453ead8884e1c0d67fe9c41ade6a45
+size 159064
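
`taskrun.log` is tracked with Git LFS, so the diff shows only the three-line pointer file (spec version, SHA-256 oid, byte size) rather than the log text itself; the new pointer references a roughly twice-as-large log (79965 → 159064 bytes). A minimal parser for that pointer format, assuming exactly the three `key value` lines shown above:

```python
def parse_lfs_pointer(text: str) -> dict:
    """Parse a Git LFS pointer file into its version, oid, and size fields."""
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    algo, digest = fields["oid"].split(":", 1)
    return {"version": fields["version"], "algo": algo,
            "digest": digest, "size": int(fields["size"])}

pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:ef7c16a50e1dd8570ebfebb583f105c944453ead8884e1c0d67fe9c41ade6a45\n"
    "size 159064\n"
)
print(parse_lfs_pointer(pointer)["size"])  # 159064
```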
lm-eval-output/SmerkyG/rwkv-5-world-1b5/ai2_arc/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
@@ -0,0 +1,132 @@
+{
+  "results": {
+    "ai2_arc": {
+      "acc,none": 0.512119503945885,
+      "acc_stderr,none": 0.10742116000119395,
+      "acc_norm,none": 0.49408117249154454,
+      "acc_norm_stderr,none": 0.07753732451937403,
+      "alias": "ai2_arc"
+    },
+    "arc_challenge": {
+      "acc,none": 0.28498293515358364,
+      "acc_stderr,none": 0.013191348179838793,
+      "acc_norm,none": 0.3310580204778157,
+      "acc_norm_stderr,none": 0.01375206241981783,
+      "alias": " - arc_challenge"
+    },
+    "arc_easy": {
+      "acc,none": 0.6241582491582491,
+      "acc_stderr,none": 0.009938436373170633,
+      "acc_norm,none": 0.5744949494949495,
+      "acc_norm_stderr,none": 0.010145271182591033,
+      "alias": " - arc_easy"
+    }
+  },
+  "groups": {
+    "ai2_arc": {
+      "acc,none": 0.512119503945885,
+      "acc_stderr,none": 0.10742116000119395,
+      "acc_norm,none": 0.49408117249154454,
+      "acc_norm_stderr,none": 0.07753732451937403,
+      "alias": "ai2_arc"
+    }
+  },
+  "configs": {
+    "arc_challenge": {
+      "task": "arc_challenge",
+      "group": [
+        "ai2_arc"
+      ],
+      "dataset_path": "allenai/ai2_arc",
+      "dataset_name": "ARC-Challenge",
+      "training_split": "train",
+      "validation_split": "validation",
+      "test_split": "test",
+      "doc_to_text": "Question: {{question}}\nAnswer:",
+      "doc_to_target": "{{choices.label.index(answerKey)}}",
+      "doc_to_choice": "{{choices.text}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "metric_list": [
+        {
+          "metric": "acc",
+          "aggregation": "mean",
+          "higher_is_better": true
+        },
+        {
+          "metric": "acc_norm",
+          "aggregation": "mean",
+          "higher_is_better": true
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "Question: {{question}}\nAnswer:",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "arc_easy": {
+      "task": "arc_easy",
+      "group": [
+        "ai2_arc"
+      ],
+      "dataset_path": "allenai/ai2_arc",
+      "dataset_name": "ARC-Easy",
+      "training_split": "train",
+      "validation_split": "validation",
+      "test_split": "test",
+      "doc_to_text": "Question: {{question}}\nAnswer:",
+      "doc_to_target": "{{choices.label.index(answerKey)}}",
+      "doc_to_choice": "{{choices.text}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "metric_list": [
+        {
+          "metric": "acc",
+          "aggregation": "mean",
+          "higher_is_better": true
+        },
+        {
+          "metric": "acc_norm",
+          "aggregation": "mean",
+          "higher_is_better": true
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "Question: {{question}}\nAnswer:",
+      "metadata": {
+        "version": 1.0
+      }
+    }
+  },
+  "versions": {
+    "ai2_arc": "N/A",
+    "arc_challenge": 1.0,
+    "arc_easy": 1.0
+  },
+  "n-shot": {
+    "ai2_arc": 0,
+    "arc_challenge": 0,
+    "arc_easy": 0
+  },
+  "config": {
+    "model": "hf",
+    "model_args": "pretrained=SmerkyG/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True",
+    "batch_size": "auto",
+    "batch_sizes": [
+      64
+    ],
+    "device": null,
+    "use_cache": null,
+    "limit": null,
+    "bootstrap_iters": 100000,
+    "gen_kwargs": null
+  },
+  "git_hash": "1ee41f7"
+}
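
The trailing `config` block records exactly how this file was produced: the harness's `hf` backend with `pretrained=SmerkyG/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True` and automatic batch sizing (resolved to 64). A hedged sketch of reproducing the run through the lm-evaluation-harness Python API; it assumes a harness version exposing `lm_eval.simple_evaluate`, and the exact signature may differ across releases:

```python
import json
import lm_eval

# Re-run the ai2_arc group with the settings from the config block above.
results = lm_eval.simple_evaluate(
    model="hf",
    model_args="pretrained=SmerkyG/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True",
    tasks=["ai2_arc"],
    batch_size="auto",
)

# The returned dict mirrors results.json ("results", "configs", "versions", ...).
print(results["results"]["ai2_arc"]["acc,none"])
with open("results.json", "w") as f:
    json.dump(results, f, indent=2, default=str)
```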
lm-eval-output/SmerkyG/rwkv-5-world-1b5/ai2_arc/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:70498c2bbc6277b14857387a1cb10f42fdaa43ffad760b6a120585e3cc73d959
+size 48938
lm-eval-output/SmerkyG/rwkv-5-world-1b5/anli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
@@ -0,0 +1,161 @@
+{
+  "results": {
+    "anli": {
+      "acc,none": 0.3446875,
+      "acc_stderr,none": 0.016201421596492432,
+      "alias": "anli"
+    },
+    "anli_r1": {
+      "acc,none": 0.358,
+      "acc_stderr,none": 0.01516792886540756,
+      "alias": " - anli_r1"
+    },
+    "anli_r2": {
+      "acc,none": 0.33,
+      "acc_stderr,none": 0.014876872027456727,
+      "alias": " - anli_r2"
+    },
+    "anli_r3": {
+      "acc,none": 0.3458333333333333,
+      "acc_stderr,none": 0.013736245342311012,
+      "alias": " - anli_r3"
+    }
+  },
+  "groups": {
+    "anli": {
+      "acc,none": 0.3446875,
+      "acc_stderr,none": 0.016201421596492432,
+      "alias": "anli"
+    }
+  },
+  "configs": {
+    "anli_r1": {
+      "task": "anli_r1",
+      "group": [
+        "anli"
+      ],
+      "dataset_path": "anli",
+      "training_split": "train_r1",
+      "validation_split": "dev_r1",
+      "test_split": "test_r1",
+      "doc_to_text": "{{premise}}\nQuestion: {{hypothesis}} True, False, or Neither?\nAnswer:",
+      "doc_to_target": "{{['True', 'Neither', 'False'][label]}}",
+      "doc_to_choice": [
+        "True",
+        "Neither",
+        "False"
+      ],
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "metric_list": [
+        {
+          "metric": "acc",
+          "aggregation": "mean",
+          "higher_is_better": true
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "premise",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "anli_r2": {
+      "task": "anli_r2",
+      "group": [
+        "anli"
+      ],
+      "dataset_path": "anli",
+      "training_split": "train_r2",
+      "validation_split": "dev_r2",
+      "test_split": "test_r2",
+      "doc_to_text": "{{premise}}\nQuestion: {{hypothesis}} True, False, or Neither?\nAnswer:",
+      "doc_to_target": "{{['True', 'Neither', 'False'][label]}}",
+      "doc_to_choice": [
+        "True",
+        "Neither",
+        "False"
+      ],
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "metric_list": [
+        {
+          "metric": "acc",
+          "aggregation": "mean",
+          "higher_is_better": true
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "premise",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "anli_r3": {
+      "task": "anli_r3",
+      "group": [
+        "anli"
+      ],
+      "dataset_path": "anli",
+      "training_split": "train_r3",
+      "validation_split": "dev_r3",
+      "test_split": "test_r3",
+      "doc_to_text": "{{premise}}\nQuestion: {{hypothesis}} True, False, or Neither?\nAnswer:",
+      "doc_to_target": "{{['True', 'Neither', 'False'][label]}}",
+      "doc_to_choice": [
+        "True",
+        "Neither",
+        "False"
+      ],
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "metric_list": [
+        {
+          "metric": "acc",
+          "aggregation": "mean",
+          "higher_is_better": true
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "premise",
+      "metadata": {
+        "version": 1.0
+      }
+    }
+  },
+  "versions": {
+    "anli": "N/A",
+    "anli_r1": 1.0,
+    "anli_r2": 1.0,
+    "anli_r3": 1.0
+  },
+  "n-shot": {
+    "anli": 0,
+    "anli_r1": 0,
+    "anli_r2": 0,
+    "anli_r3": 0
+  },
+  "config": {
+    "model": "hf",
+    "model_args": "pretrained=SmerkyG/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True",
+    "batch_size": "auto",
+    "batch_sizes": [
+      64
+    ],
+    "device": null,
+    "use_cache": null,
+    "limit": null,
+    "bootstrap_iters": 100000,
+    "gen_kwargs": null
+  },
+  "git_hash": "1ee41f7"
+}
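
All three ANLI rounds share one prompt recipe: `doc_to_text` interpolates the premise and hypothesis, the three `doc_to_choice` strings are scored as continuations joined by the single-space `target_delimiter`, and `doc_to_target` maps the integer label through `['True', 'Neither', 'False']`. A small illustration of how one example would be rendered under that template; the premise/hypothesis pair is invented for illustration:

```python
# Hypothetical ANLI-style example; only the template logic comes from the config.
doc = {
    "premise": "The museum opened a new wing in 2019.",
    "hypothesis": "The museum expanded recently.",
    "label": 0,  # 0 -> "True", 1 -> "Neither", 2 -> "False"
}

prompt = (
    f"{doc['premise']}\n"
    f"Question: {doc['hypothesis']} True, False, or Neither?\n"
    f"Answer:"
)
choices = ["True", "Neither", "False"]

# The harness scores each choice appended after the " " target_delimiter
# and compares the model's preferred choice against the gold target.
for choice in choices:
    print(repr(prompt + " " + choice))
print("gold:", choices[doc["label"]])
```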
lm-eval-output/SmerkyG/rwkv-5-world-1b5/anli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e81f489289541497f6c037de418a934e664fce533485d8aa44fdd232df89245e
+size 42769
lm-eval-output/SmerkyG/rwkv-5-world-1b5/blimp/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
@@ -0,0 +1,2249 @@
+{
+  "results": {
+    "blimp": {
+      "acc,none": 0.8336119402985075,
+      "acc_stderr,none": 0.1509763959549486,
+      "alias": "blimp"
+    },
+    "blimp_adjunct_island": {
+      "acc,none": 0.9,
+      "acc_stderr,none": 0.00949157995752507,
+      "alias": " - blimp_adjunct_island"
+    },
+    "blimp_anaphor_gender_agreement": {
+      "acc,none": 0.992,
+      "acc_stderr,none": 0.0028185003005045057,
+      "alias": " - blimp_anaphor_gender_agreement"
+    },
+    "blimp_anaphor_number_agreement": {
+      "acc,none": 0.995,
+      "acc_stderr,none": 0.00223158687484488,
+      "alias": " - blimp_anaphor_number_agreement"
+    },
+    "blimp_animate_subject_passive": {
+      "acc,none": 0.797,
+      "acc_stderr,none": 0.012726073744598275,
+      "alias": " - blimp_animate_subject_passive"
+    },
+    "blimp_animate_subject_trans": {
+      "acc,none": 0.907,
+      "acc_stderr,none": 0.009188875634996693,
+      "alias": " - blimp_animate_subject_trans"
+    },
+    "blimp_causative": {
+      "acc,none": 0.779,
+      "acc_stderr,none": 0.013127502859696244,
+      "alias": " - blimp_causative"
+    },
+    "blimp_complex_NP_island": {
+      "acc,none": 0.654,
+      "acc_stderr,none": 0.015050266127564441,
+      "alias": " - blimp_complex_NP_island"
+    },
+    "blimp_coordinate_structure_constraint_complex_left_branch": {
+      "acc,none": 0.742,
+      "acc_stderr,none": 0.013842963108656603,
+      "alias": " - blimp_coordinate_structure_constraint_complex_left_branch"
+    },
+    "blimp_coordinate_structure_constraint_object_extraction": {
+      "acc,none": 0.85,
+      "acc_stderr,none": 0.0112972398234093,
+      "alias": " - blimp_coordinate_structure_constraint_object_extraction"
+    },
+    "blimp_determiner_noun_agreement_1": {
+      "acc,none": 0.998,
+      "acc_stderr,none": 0.001413505570557816,
+      "alias": " - blimp_determiner_noun_agreement_1"
+    },
+    "blimp_determiner_noun_agreement_2": {
+      "acc,none": 0.991,
+      "acc_stderr,none": 0.002987963843142644,
+      "alias": " - blimp_determiner_noun_agreement_2"
+    },
+    "blimp_determiner_noun_agreement_irregular_1": {
+      "acc,none": 0.963,
+      "acc_stderr,none": 0.005972157622389635,
+      "alias": " - blimp_determiner_noun_agreement_irregular_1"
+    },
+    "blimp_determiner_noun_agreement_irregular_2": {
+      "acc,none": 0.955,
+      "acc_stderr,none": 0.0065588122414061405,
+      "alias": " - blimp_determiner_noun_agreement_irregular_2"
+    },
+    "blimp_determiner_noun_agreement_with_adj_2": {
+      "acc,none": 0.961,
+      "acc_stderr,none": 0.006125072776426103,
+      "alias": " - blimp_determiner_noun_agreement_with_adj_2"
+    },
+    "blimp_determiner_noun_agreement_with_adj_irregular_1": {
+      "acc,none": 0.929,
+      "acc_stderr,none": 0.008125578442487924,
+      "alias": " - blimp_determiner_noun_agreement_with_adj_irregular_1"
+    },
+    "blimp_determiner_noun_agreement_with_adj_irregular_2": {
+      "acc,none": 0.924,
+      "acc_stderr,none": 0.008384169266796398,
+      "alias": " - blimp_determiner_noun_agreement_with_adj_irregular_2"
+    },
+    "blimp_determiner_noun_agreement_with_adjective_1": {
+      "acc,none": 0.982,
+      "acc_stderr,none": 0.004206387249611461,
+      "alias": " - blimp_determiner_noun_agreement_with_adjective_1"
+    },
+    "blimp_distractor_agreement_relational_noun": {
+      "acc,none": 0.881,
+      "acc_stderr,none": 0.010244215145336667,
+      "alias": " - blimp_distractor_agreement_relational_noun"
+    },
+    "blimp_distractor_agreement_relative_clause": {
+      "acc,none": 0.797,
+      "acc_stderr,none": 0.01272607374459827,
+      "alias": " - blimp_distractor_agreement_relative_clause"
+    },
+    "blimp_drop_argument": {
+      "acc,none": 0.806,
+      "acc_stderr,none": 0.012510816141264366,
+      "alias": " - blimp_drop_argument"
+    },
+    "blimp_ellipsis_n_bar_1": {
+      "acc,none": 0.852,
+      "acc_stderr,none": 0.011234866364235261,
+      "alias": " - blimp_ellipsis_n_bar_1"
+    },
+    "blimp_ellipsis_n_bar_2": {
+      "acc,none": 0.883,
+      "acc_stderr,none": 0.010169287802713327,
+      "alias": " - blimp_ellipsis_n_bar_2"
+    },
+    "blimp_existential_there_object_raising": {
+      "acc,none": 0.843,
+      "acc_stderr,none": 0.011510146979230177,
+      "alias": " - blimp_existential_there_object_raising"
+    },
+    "blimp_existential_there_quantifiers_1": {
+      "acc,none": 0.989,
+      "acc_stderr,none": 0.0032999833166078166,
+      "alias": " - blimp_existential_there_quantifiers_1"
+    },
+    "blimp_existential_there_quantifiers_2": {
+      "acc,none": 0.27,
+      "acc_stderr,none": 0.014046255632633915,
+      "alias": " - blimp_existential_there_quantifiers_2"
+    },
+    "blimp_existential_there_subject_raising": {
+      "acc,none": 0.928,
+      "acc_stderr,none": 0.008178195576218681,
+      "alias": " - blimp_existential_there_subject_raising"
+    },
+    "blimp_expletive_it_object_raising": {
+      "acc,none": 0.827,
+      "acc_stderr,none": 0.011967214137559927,
+      "alias": " - blimp_expletive_it_object_raising"
+    },
+    "blimp_inchoative": {
+      "acc,none": 0.696,
+      "acc_stderr,none": 0.014553205687950436,
+      "alias": " - blimp_inchoative"
+    },
+    "blimp_intransitive": {
+      "acc,none": 0.856,
+      "acc_stderr,none": 0.01110798754893915,
+      "alias": " - blimp_intransitive"
+    },
+    "blimp_irregular_past_participle_adjectives": {
+      "acc,none": 0.994,
+      "acc_stderr,none": 0.002443352199329801,
+      "alias": " - blimp_irregular_past_participle_adjectives"
+    },
+    "blimp_irregular_past_participle_verbs": {
+      "acc,none": 0.915,
+      "acc_stderr,none": 0.008823426366942305,
+      "alias": " - blimp_irregular_past_participle_verbs"
+    },
+    "blimp_irregular_plural_subject_verb_agreement_1": {
+      "acc,none": 0.937,
+      "acc_stderr,none": 0.007687007876286419,
+      "alias": " - blimp_irregular_plural_subject_verb_agreement_1"
+    },
+    "blimp_irregular_plural_subject_verb_agreement_2": {
+      "acc,none": 0.927,
+      "acc_stderr,none": 0.00823035471524406,
+      "alias": " - blimp_irregular_plural_subject_verb_agreement_2"
+    },
+    "blimp_left_branch_island_echo_question": {
+      "acc,none": 0.45,
+      "acc_stderr,none": 0.015740004693383852,
+      "alias": " - blimp_left_branch_island_echo_question"
+    },
+    "blimp_left_branch_island_simple_question": {
+      "acc,none": 0.851,
+      "acc_stderr,none": 0.011266140684632156,
+      "alias": " - blimp_left_branch_island_simple_question"
+    },
+    "blimp_matrix_question_npi_licensor_present": {
+      "acc,none": 0.708,
+      "acc_stderr,none": 0.014385511563477343,
+      "alias": " - blimp_matrix_question_npi_licensor_present"
+    },
+    "blimp_npi_present_1": {
+      "acc,none": 0.577,
+      "acc_stderr,none": 0.015630589090476345,
+      "alias": " - blimp_npi_present_1"
+    },
+    "blimp_npi_present_2": {
+      "acc,none": 0.668,
+      "acc_stderr,none": 0.01489959724281148,
+      "alias": " - blimp_npi_present_2"
+    },
+    "blimp_only_npi_licensor_present": {
+      "acc,none": 0.971,
+      "acc_stderr,none": 0.005309160685757018,
+      "alias": " - blimp_only_npi_licensor_present"
+    },
+    "blimp_only_npi_scope": {
+      "acc,none": 0.733,
+      "acc_stderr,none": 0.013996674851796273,
+      "alias": " - blimp_only_npi_scope"
+    },
+    "blimp_passive_1": {
+      "acc,none": 0.907,
+      "acc_stderr,none": 0.009188875634996697,
+      "alias": " - blimp_passive_1"
+    },
+    "blimp_passive_2": {
+      "acc,none": 0.908,
+      "acc_stderr,none": 0.0091443763931511,
+      "alias": " - blimp_passive_2"
+    },
+    "blimp_principle_A_c_command": {
+      "acc,none": 0.839,
+      "acc_stderr,none": 0.011628164696727193,
+      "alias": " - blimp_principle_A_c_command"
+    },
+    "blimp_principle_A_case_1": {
+      "acc,none": 1.0,
+      "acc_stderr,none": 0.0,
+      "alias": " - blimp_principle_A_case_1"
+    },
+    "blimp_principle_A_case_2": {
+      "acc,none": 0.965,
+      "acc_stderr,none": 0.005814534272734976,
+      "alias": " - blimp_principle_A_case_2"
+    },
+    "blimp_principle_A_domain_1": {
+      "acc,none": 0.994,
+      "acc_stderr,none": 0.0024433521993298415,
+      "alias": " - blimp_principle_A_domain_1"
+    },
+    "blimp_principle_A_domain_2": {
+      "acc,none": 0.9,
+      "acc_stderr,none": 0.009491579957525054,
+      "alias": " - blimp_principle_A_domain_2"
+    },
+    "blimp_principle_A_domain_3": {
+      "acc,none": 0.756,
+      "acc_stderr,none": 0.013588548437881418,
+      "alias": " - blimp_principle_A_domain_3"
+    },
+    "blimp_principle_A_reconstruction": {
+      "acc,none": 0.47,
+      "acc_stderr,none": 0.015790799515836763,
+      "alias": " - blimp_principle_A_reconstruction"
+    },
+    "blimp_regular_plural_subject_verb_agreement_1": {
+      "acc,none": 0.965,
+      "acc_stderr,none": 0.005814534272734965,
+      "alias": " - blimp_regular_plural_subject_verb_agreement_1"
+    },
+    "blimp_regular_plural_subject_verb_agreement_2": {
+      "acc,none": 0.909,
+      "acc_stderr,none": 0.009099549538400248,
+      "alias": " - blimp_regular_plural_subject_verb_agreement_2"
+    },
+    "blimp_sentential_negation_npi_licensor_present": {
+      "acc,none": 0.985,
+      "acc_stderr,none": 0.003845749574503012,
+      "alias": " - blimp_sentential_negation_npi_licensor_present"
+    },
+    "blimp_sentential_negation_npi_scope": {
+      "acc,none": 0.759,
+      "acc_stderr,none": 0.01353152253451541,
+      "alias": " - blimp_sentential_negation_npi_scope"
+    },
+    "blimp_sentential_subject_island": {
+      "acc,none": 0.455,
+      "acc_stderr,none": 0.01575510149834709,
+      "alias": " - blimp_sentential_subject_island"
+    },
+    "blimp_superlative_quantifiers_1": {
+      "acc,none": 0.848,
+      "acc_stderr,none": 0.01135891830347528,
+      "alias": " - blimp_superlative_quantifiers_1"
+    },
+    "blimp_superlative_quantifiers_2": {
+      "acc,none": 0.75,
+      "acc_stderr,none": 0.013699915608779773,
+      "alias": " - blimp_superlative_quantifiers_2"
+    },
+    "blimp_tough_vs_raising_1": {
+      "acc,none": 0.709,
+      "acc_stderr,none": 0.014370995982377953,
+      "alias": " - blimp_tough_vs_raising_1"
+    },
+    "blimp_tough_vs_raising_2": {
+      "acc,none": 0.877,
+      "acc_stderr,none": 0.010391293421849883,
+      "alias": " - blimp_tough_vs_raising_2"
+    },
+    "blimp_transitive": {
+      "acc,none": 0.891,
+      "acc_stderr,none": 0.009859828407037195,
+      "alias": " - blimp_transitive"
+    },
+    "blimp_wh_island": {
+      "acc,none": 0.762,
+      "acc_stderr,none": 0.01347358666196722,
+      "alias": " - blimp_wh_island"
+    },
+    "blimp_wh_questions_object_gap": {
+      "acc,none": 0.865,
+      "acc_stderr,none": 0.010811655372416053,
+      "alias": " - blimp_wh_questions_object_gap"
+    },
+    "blimp_wh_questions_subject_gap": {
+      "acc,none": 0.949,
+      "acc_stderr,none": 0.006960420062571401,
+      "alias": " - blimp_wh_questions_subject_gap"
+    },
+    "blimp_wh_questions_subject_gap_long_distance": {
+      "acc,none": 0.909,
+      "acc_stderr,none": 0.00909954953840024,
+      "alias": " - blimp_wh_questions_subject_gap_long_distance"
+    },
+    "blimp_wh_vs_that_no_gap": {
+      "acc,none": 0.975,
+      "acc_stderr,none": 0.004939574819698455,
+      "alias": " - blimp_wh_vs_that_no_gap"
+    },
+    "blimp_wh_vs_that_no_gap_long_distance": {
+      "acc,none": 0.962,
+      "acc_stderr,none": 0.006049181150584934,
+      "alias": " - blimp_wh_vs_that_no_gap_long_distance"
+    },
+    "blimp_wh_vs_that_with_gap": {
+      "acc,none": 0.467,
+      "acc_stderr,none": 0.015784807891138786,
+      "alias": " - blimp_wh_vs_that_with_gap"
+    },
+    "blimp_wh_vs_that_with_gap_long_distance": {
+      "acc,none": 0.398,
+      "acc_stderr,none": 0.015486634102858924,
+      "alias": " - blimp_wh_vs_that_with_gap_long_distance"
+    }
+  },
+  "groups": {
+    "blimp": {
+      "acc,none": 0.8336119402985075,
+      "acc_stderr,none": 0.1509763959549486,
+      "alias": "blimp"
+    }
+  },
+  "configs": {
+    "blimp_adjunct_island": {
+      "task": "blimp_adjunct_island",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "adjunct_island",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_anaphor_gender_agreement": {
+      "task": "blimp_anaphor_gender_agreement",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "anaphor_gender_agreement",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_anaphor_number_agreement": {
+      "task": "blimp_anaphor_number_agreement",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "anaphor_number_agreement",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_animate_subject_passive": {
+      "task": "blimp_animate_subject_passive",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "animate_subject_passive",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_animate_subject_trans": {
+      "task": "blimp_animate_subject_trans",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "animate_subject_trans",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_causative": {
+      "task": "blimp_causative",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "causative",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_complex_NP_island": {
+      "task": "blimp_complex_NP_island",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "complex_NP_island",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_coordinate_structure_constraint_complex_left_branch": {
+      "task": "blimp_coordinate_structure_constraint_complex_left_branch",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "coordinate_structure_constraint_complex_left_branch",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_coordinate_structure_constraint_object_extraction": {
+      "task": "blimp_coordinate_structure_constraint_object_extraction",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "coordinate_structure_constraint_object_extraction",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_determiner_noun_agreement_1": {
+      "task": "blimp_determiner_noun_agreement_1",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "determiner_noun_agreement_1",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_determiner_noun_agreement_2": {
+      "task": "blimp_determiner_noun_agreement_2",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "determiner_noun_agreement_2",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_determiner_noun_agreement_irregular_1": {
+      "task": "blimp_determiner_noun_agreement_irregular_1",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "determiner_noun_agreement_irregular_1",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_determiner_noun_agreement_irregular_2": {
+      "task": "blimp_determiner_noun_agreement_irregular_2",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "determiner_noun_agreement_irregular_2",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_determiner_noun_agreement_with_adj_2": {
+      "task": "blimp_determiner_noun_agreement_with_adj_2",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "determiner_noun_agreement_with_adj_2",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_determiner_noun_agreement_with_adj_irregular_1": {
+      "task": "blimp_determiner_noun_agreement_with_adj_irregular_1",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "determiner_noun_agreement_with_adj_irregular_1",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_determiner_noun_agreement_with_adj_irregular_2": {
+      "task": "blimp_determiner_noun_agreement_with_adj_irregular_2",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "determiner_noun_agreement_with_adj_irregular_2",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_determiner_noun_agreement_with_adjective_1": {
+      "task": "blimp_determiner_noun_agreement_with_adjective_1",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "determiner_noun_agreement_with_adjective_1",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_distractor_agreement_relational_noun": {
+      "task": "blimp_distractor_agreement_relational_noun",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "distractor_agreement_relational_noun",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_distractor_agreement_relative_clause": {
+      "task": "blimp_distractor_agreement_relative_clause",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "distractor_agreement_relative_clause",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_drop_argument": {
+      "task": "blimp_drop_argument",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "drop_argument",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_ellipsis_n_bar_1": {
+      "task": "blimp_ellipsis_n_bar_1",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "ellipsis_n_bar_1",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_ellipsis_n_bar_2": {
+      "task": "blimp_ellipsis_n_bar_2",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "ellipsis_n_bar_2",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_existential_there_object_raising": {
+      "task": "blimp_existential_there_object_raising",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "existential_there_object_raising",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_existential_there_quantifiers_1": {
+      "task": "blimp_existential_there_quantifiers_1",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "existential_there_quantifiers_1",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_existential_there_quantifiers_2": {
+      "task": "blimp_existential_there_quantifiers_2",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "existential_there_quantifiers_2",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_existential_there_subject_raising": {
+      "task": "blimp_existential_there_subject_raising",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "existential_there_subject_raising",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_expletive_it_object_raising": {
+      "task": "blimp_expletive_it_object_raising",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "expletive_it_object_raising",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_inchoative": {
+      "task": "blimp_inchoative",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "inchoative",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_intransitive": {
+      "task": "blimp_intransitive",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "intransitive",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_irregular_past_participle_adjectives": {
+      "task": "blimp_irregular_past_participle_adjectives",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "irregular_past_participle_adjectives",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_irregular_past_participle_verbs": {
+      "task": "blimp_irregular_past_participle_verbs",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "irregular_past_participle_verbs",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_irregular_plural_subject_verb_agreement_1": {
+      "task": "blimp_irregular_plural_subject_verb_agreement_1",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "irregular_plural_subject_verb_agreement_1",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
+    },
+    "blimp_irregular_plural_subject_verb_agreement_2": {
+      "task": "blimp_irregular_plural_subject_verb_agreement_2",
+      "group": "blimp",
+      "dataset_path": "blimp",
+      "dataset_name": "irregular_plural_subject_verb_agreement_2",
+      "validation_split": "train",
+      "doc_to_text": "",
+      "doc_to_target": 0,
+      "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+      "description": "",
+      "target_delimiter": " ",
+      "fewshot_delimiter": "\n\n",
+      "num_fewshot": 0,
+      "metric_list": [
+        {
+          "metric": "acc"
+        }
+      ],
+      "output_type": "multiple_choice",
+      "repeats": 1,
+      "should_decontaminate": true,
+      "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+      "metadata": {
+        "version": 1.0
+      }
1209
+ },
1210
+ "blimp_left_branch_island_echo_question": {
1211
+ "task": "blimp_left_branch_island_echo_question",
1212
+ "group": "blimp",
1213
+ "dataset_path": "blimp",
1214
+ "dataset_name": "left_branch_island_echo_question",
1215
+ "validation_split": "train",
1216
+ "doc_to_text": "",
1217
+ "doc_to_target": 0,
1218
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1219
+ "description": "",
1220
+ "target_delimiter": " ",
1221
+ "fewshot_delimiter": "\n\n",
1222
+ "num_fewshot": 0,
1223
+ "metric_list": [
1224
+ {
1225
+ "metric": "acc"
1226
+ }
1227
+ ],
1228
+ "output_type": "multiple_choice",
1229
+ "repeats": 1,
1230
+ "should_decontaminate": true,
1231
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1232
+ "metadata": {
1233
+ "version": 1.0
1234
+ }
1235
+ },
1236
+ "blimp_left_branch_island_simple_question": {
1237
+ "task": "blimp_left_branch_island_simple_question",
1238
+ "group": "blimp",
1239
+ "dataset_path": "blimp",
1240
+ "dataset_name": "left_branch_island_simple_question",
1241
+ "validation_split": "train",
1242
+ "doc_to_text": "",
1243
+ "doc_to_target": 0,
1244
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1245
+ "description": "",
1246
+ "target_delimiter": " ",
1247
+ "fewshot_delimiter": "\n\n",
1248
+ "num_fewshot": 0,
1249
+ "metric_list": [
1250
+ {
1251
+ "metric": "acc"
1252
+ }
1253
+ ],
1254
+ "output_type": "multiple_choice",
1255
+ "repeats": 1,
1256
+ "should_decontaminate": true,
1257
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1258
+ "metadata": {
1259
+ "version": 1.0
1260
+ }
1261
+ },
1262
+ "blimp_matrix_question_npi_licensor_present": {
1263
+ "task": "blimp_matrix_question_npi_licensor_present",
1264
+ "group": "blimp",
1265
+ "dataset_path": "blimp",
1266
+ "dataset_name": "matrix_question_npi_licensor_present",
1267
+ "validation_split": "train",
1268
+ "doc_to_text": "",
1269
+ "doc_to_target": 0,
1270
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1271
+ "description": "",
1272
+ "target_delimiter": " ",
1273
+ "fewshot_delimiter": "\n\n",
1274
+ "num_fewshot": 0,
1275
+ "metric_list": [
1276
+ {
1277
+ "metric": "acc"
1278
+ }
1279
+ ],
1280
+ "output_type": "multiple_choice",
1281
+ "repeats": 1,
1282
+ "should_decontaminate": true,
1283
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1284
+ "metadata": {
1285
+ "version": 1.0
1286
+ }
1287
+ },
1288
+ "blimp_npi_present_1": {
1289
+ "task": "blimp_npi_present_1",
1290
+ "group": "blimp",
1291
+ "dataset_path": "blimp",
1292
+ "dataset_name": "npi_present_1",
1293
+ "validation_split": "train",
1294
+ "doc_to_text": "",
1295
+ "doc_to_target": 0,
1296
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1297
+ "description": "",
1298
+ "target_delimiter": " ",
1299
+ "fewshot_delimiter": "\n\n",
1300
+ "num_fewshot": 0,
1301
+ "metric_list": [
1302
+ {
1303
+ "metric": "acc"
1304
+ }
1305
+ ],
1306
+ "output_type": "multiple_choice",
1307
+ "repeats": 1,
1308
+ "should_decontaminate": true,
1309
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1310
+ "metadata": {
1311
+ "version": 1.0
1312
+ }
1313
+ },
1314
+ "blimp_npi_present_2": {
1315
+ "task": "blimp_npi_present_2",
1316
+ "group": "blimp",
1317
+ "dataset_path": "blimp",
1318
+ "dataset_name": "npi_present_2",
1319
+ "validation_split": "train",
1320
+ "doc_to_text": "",
1321
+ "doc_to_target": 0,
1322
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1323
+ "description": "",
1324
+ "target_delimiter": " ",
1325
+ "fewshot_delimiter": "\n\n",
1326
+ "num_fewshot": 0,
1327
+ "metric_list": [
1328
+ {
1329
+ "metric": "acc"
1330
+ }
1331
+ ],
1332
+ "output_type": "multiple_choice",
1333
+ "repeats": 1,
1334
+ "should_decontaminate": true,
1335
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1336
+ "metadata": {
1337
+ "version": 1.0
1338
+ }
1339
+ },
1340
+ "blimp_only_npi_licensor_present": {
1341
+ "task": "blimp_only_npi_licensor_present",
1342
+ "group": "blimp",
1343
+ "dataset_path": "blimp",
1344
+ "dataset_name": "only_npi_licensor_present",
1345
+ "validation_split": "train",
1346
+ "doc_to_text": "",
1347
+ "doc_to_target": 0,
1348
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1349
+ "description": "",
1350
+ "target_delimiter": " ",
1351
+ "fewshot_delimiter": "\n\n",
1352
+ "num_fewshot": 0,
1353
+ "metric_list": [
1354
+ {
1355
+ "metric": "acc"
1356
+ }
1357
+ ],
1358
+ "output_type": "multiple_choice",
1359
+ "repeats": 1,
1360
+ "should_decontaminate": true,
1361
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1362
+ "metadata": {
1363
+ "version": 1.0
1364
+ }
1365
+ },
1366
+ "blimp_only_npi_scope": {
1367
+ "task": "blimp_only_npi_scope",
1368
+ "group": "blimp",
1369
+ "dataset_path": "blimp",
1370
+ "dataset_name": "only_npi_scope",
1371
+ "validation_split": "train",
1372
+ "doc_to_text": "",
1373
+ "doc_to_target": 0,
1374
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1375
+ "description": "",
1376
+ "target_delimiter": " ",
1377
+ "fewshot_delimiter": "\n\n",
1378
+ "num_fewshot": 0,
1379
+ "metric_list": [
1380
+ {
1381
+ "metric": "acc"
1382
+ }
1383
+ ],
1384
+ "output_type": "multiple_choice",
1385
+ "repeats": 1,
1386
+ "should_decontaminate": true,
1387
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1388
+ "metadata": {
1389
+ "version": 1.0
1390
+ }
1391
+ },
1392
+ "blimp_passive_1": {
1393
+ "task": "blimp_passive_1",
1394
+ "group": "blimp",
1395
+ "dataset_path": "blimp",
1396
+ "dataset_name": "passive_1",
1397
+ "validation_split": "train",
1398
+ "doc_to_text": "",
1399
+ "doc_to_target": 0,
1400
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1401
+ "description": "",
1402
+ "target_delimiter": " ",
1403
+ "fewshot_delimiter": "\n\n",
1404
+ "num_fewshot": 0,
1405
+ "metric_list": [
1406
+ {
1407
+ "metric": "acc"
1408
+ }
1409
+ ],
1410
+ "output_type": "multiple_choice",
1411
+ "repeats": 1,
1412
+ "should_decontaminate": true,
1413
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1414
+ "metadata": {
1415
+ "version": 1.0
1416
+ }
1417
+ },
1418
+ "blimp_passive_2": {
1419
+ "task": "blimp_passive_2",
1420
+ "group": "blimp",
1421
+ "dataset_path": "blimp",
1422
+ "dataset_name": "passive_2",
1423
+ "validation_split": "train",
1424
+ "doc_to_text": "",
1425
+ "doc_to_target": 0,
1426
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1427
+ "description": "",
1428
+ "target_delimiter": " ",
1429
+ "fewshot_delimiter": "\n\n",
1430
+ "num_fewshot": 0,
1431
+ "metric_list": [
1432
+ {
1433
+ "metric": "acc"
1434
+ }
1435
+ ],
1436
+ "output_type": "multiple_choice",
1437
+ "repeats": 1,
1438
+ "should_decontaminate": true,
1439
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1440
+ "metadata": {
1441
+ "version": 1.0
1442
+ }
1443
+ },
1444
+ "blimp_principle_A_c_command": {
1445
+ "task": "blimp_principle_A_c_command",
1446
+ "group": "blimp",
1447
+ "dataset_path": "blimp",
1448
+ "dataset_name": "principle_A_c_command",
1449
+ "validation_split": "train",
1450
+ "doc_to_text": "",
1451
+ "doc_to_target": 0,
1452
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1453
+ "description": "",
1454
+ "target_delimiter": " ",
1455
+ "fewshot_delimiter": "\n\n",
1456
+ "num_fewshot": 0,
1457
+ "metric_list": [
1458
+ {
1459
+ "metric": "acc"
1460
+ }
1461
+ ],
1462
+ "output_type": "multiple_choice",
1463
+ "repeats": 1,
1464
+ "should_decontaminate": true,
1465
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1466
+ "metadata": {
1467
+ "version": 1.0
1468
+ }
1469
+ },
1470
+ "blimp_principle_A_case_1": {
1471
+ "task": "blimp_principle_A_case_1",
1472
+ "group": "blimp",
1473
+ "dataset_path": "blimp",
1474
+ "dataset_name": "principle_A_case_1",
1475
+ "validation_split": "train",
1476
+ "doc_to_text": "",
1477
+ "doc_to_target": 0,
1478
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1479
+ "description": "",
1480
+ "target_delimiter": " ",
1481
+ "fewshot_delimiter": "\n\n",
1482
+ "num_fewshot": 0,
1483
+ "metric_list": [
1484
+ {
1485
+ "metric": "acc"
1486
+ }
1487
+ ],
1488
+ "output_type": "multiple_choice",
1489
+ "repeats": 1,
1490
+ "should_decontaminate": true,
1491
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1492
+ "metadata": {
1493
+ "version": 1.0
1494
+ }
1495
+ },
1496
+ "blimp_principle_A_case_2": {
1497
+ "task": "blimp_principle_A_case_2",
1498
+ "group": "blimp",
1499
+ "dataset_path": "blimp",
1500
+ "dataset_name": "principle_A_case_2",
1501
+ "validation_split": "train",
1502
+ "doc_to_text": "",
1503
+ "doc_to_target": 0,
1504
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1505
+ "description": "",
1506
+ "target_delimiter": " ",
1507
+ "fewshot_delimiter": "\n\n",
1508
+ "num_fewshot": 0,
1509
+ "metric_list": [
1510
+ {
1511
+ "metric": "acc"
1512
+ }
1513
+ ],
1514
+ "output_type": "multiple_choice",
1515
+ "repeats": 1,
1516
+ "should_decontaminate": true,
1517
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1518
+ "metadata": {
1519
+ "version": 1.0
1520
+ }
1521
+ },
1522
+ "blimp_principle_A_domain_1": {
1523
+ "task": "blimp_principle_A_domain_1",
1524
+ "group": "blimp",
1525
+ "dataset_path": "blimp",
1526
+ "dataset_name": "principle_A_domain_1",
1527
+ "validation_split": "train",
1528
+ "doc_to_text": "",
1529
+ "doc_to_target": 0,
1530
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1531
+ "description": "",
1532
+ "target_delimiter": " ",
1533
+ "fewshot_delimiter": "\n\n",
1534
+ "num_fewshot": 0,
1535
+ "metric_list": [
1536
+ {
1537
+ "metric": "acc"
1538
+ }
1539
+ ],
1540
+ "output_type": "multiple_choice",
1541
+ "repeats": 1,
1542
+ "should_decontaminate": true,
1543
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1544
+ "metadata": {
1545
+ "version": 1.0
1546
+ }
1547
+ },
1548
+ "blimp_principle_A_domain_2": {
1549
+ "task": "blimp_principle_A_domain_2",
1550
+ "group": "blimp",
1551
+ "dataset_path": "blimp",
1552
+ "dataset_name": "principle_A_domain_2",
1553
+ "validation_split": "train",
1554
+ "doc_to_text": "",
1555
+ "doc_to_target": 0,
1556
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1557
+ "description": "",
1558
+ "target_delimiter": " ",
1559
+ "fewshot_delimiter": "\n\n",
1560
+ "num_fewshot": 0,
1561
+ "metric_list": [
1562
+ {
1563
+ "metric": "acc"
1564
+ }
1565
+ ],
1566
+ "output_type": "multiple_choice",
1567
+ "repeats": 1,
1568
+ "should_decontaminate": true,
1569
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1570
+ "metadata": {
1571
+ "version": 1.0
1572
+ }
1573
+ },
1574
+ "blimp_principle_A_domain_3": {
1575
+ "task": "blimp_principle_A_domain_3",
1576
+ "group": "blimp",
1577
+ "dataset_path": "blimp",
1578
+ "dataset_name": "principle_A_domain_3",
1579
+ "validation_split": "train",
1580
+ "doc_to_text": "",
1581
+ "doc_to_target": 0,
1582
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1583
+ "description": "",
1584
+ "target_delimiter": " ",
1585
+ "fewshot_delimiter": "\n\n",
1586
+ "num_fewshot": 0,
1587
+ "metric_list": [
1588
+ {
1589
+ "metric": "acc"
1590
+ }
1591
+ ],
1592
+ "output_type": "multiple_choice",
1593
+ "repeats": 1,
1594
+ "should_decontaminate": true,
1595
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1596
+ "metadata": {
1597
+ "version": 1.0
1598
+ }
1599
+ },
1600
+ "blimp_principle_A_reconstruction": {
1601
+ "task": "blimp_principle_A_reconstruction",
1602
+ "group": "blimp",
1603
+ "dataset_path": "blimp",
1604
+ "dataset_name": "principle_A_reconstruction",
1605
+ "validation_split": "train",
1606
+ "doc_to_text": "",
1607
+ "doc_to_target": 0,
1608
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1609
+ "description": "",
1610
+ "target_delimiter": " ",
1611
+ "fewshot_delimiter": "\n\n",
1612
+ "num_fewshot": 0,
1613
+ "metric_list": [
1614
+ {
1615
+ "metric": "acc"
1616
+ }
1617
+ ],
1618
+ "output_type": "multiple_choice",
1619
+ "repeats": 1,
1620
+ "should_decontaminate": true,
1621
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1622
+ "metadata": {
1623
+ "version": 1.0
1624
+ }
1625
+ },
1626
+ "blimp_regular_plural_subject_verb_agreement_1": {
1627
+ "task": "blimp_regular_plural_subject_verb_agreement_1",
1628
+ "group": "blimp",
1629
+ "dataset_path": "blimp",
1630
+ "dataset_name": "regular_plural_subject_verb_agreement_1",
1631
+ "validation_split": "train",
1632
+ "doc_to_text": "",
1633
+ "doc_to_target": 0,
1634
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1635
+ "description": "",
1636
+ "target_delimiter": " ",
1637
+ "fewshot_delimiter": "\n\n",
1638
+ "num_fewshot": 0,
1639
+ "metric_list": [
1640
+ {
1641
+ "metric": "acc"
1642
+ }
1643
+ ],
1644
+ "output_type": "multiple_choice",
1645
+ "repeats": 1,
1646
+ "should_decontaminate": true,
1647
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1648
+ "metadata": {
1649
+ "version": 1.0
1650
+ }
1651
+ },
1652
+ "blimp_regular_plural_subject_verb_agreement_2": {
1653
+ "task": "blimp_regular_plural_subject_verb_agreement_2",
1654
+ "group": "blimp",
1655
+ "dataset_path": "blimp",
1656
+ "dataset_name": "regular_plural_subject_verb_agreement_2",
1657
+ "validation_split": "train",
1658
+ "doc_to_text": "",
1659
+ "doc_to_target": 0,
1660
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1661
+ "description": "",
1662
+ "target_delimiter": " ",
1663
+ "fewshot_delimiter": "\n\n",
1664
+ "num_fewshot": 0,
1665
+ "metric_list": [
1666
+ {
1667
+ "metric": "acc"
1668
+ }
1669
+ ],
1670
+ "output_type": "multiple_choice",
1671
+ "repeats": 1,
1672
+ "should_decontaminate": true,
1673
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1674
+ "metadata": {
1675
+ "version": 1.0
1676
+ }
1677
+ },
1678
+ "blimp_sentential_negation_npi_licensor_present": {
1679
+ "task": "blimp_sentential_negation_npi_licensor_present",
1680
+ "group": "blimp",
1681
+ "dataset_path": "blimp",
1682
+ "dataset_name": "sentential_negation_npi_licensor_present",
1683
+ "validation_split": "train",
1684
+ "doc_to_text": "",
1685
+ "doc_to_target": 0,
1686
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1687
+ "description": "",
1688
+ "target_delimiter": " ",
1689
+ "fewshot_delimiter": "\n\n",
1690
+ "num_fewshot": 0,
1691
+ "metric_list": [
1692
+ {
1693
+ "metric": "acc"
1694
+ }
1695
+ ],
1696
+ "output_type": "multiple_choice",
1697
+ "repeats": 1,
1698
+ "should_decontaminate": true,
1699
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1700
+ "metadata": {
1701
+ "version": 1.0
1702
+ }
1703
+ },
1704
+ "blimp_sentential_negation_npi_scope": {
1705
+ "task": "blimp_sentential_negation_npi_scope",
1706
+ "group": "blimp",
1707
+ "dataset_path": "blimp",
1708
+ "dataset_name": "sentential_negation_npi_scope",
1709
+ "validation_split": "train",
1710
+ "doc_to_text": "",
1711
+ "doc_to_target": 0,
1712
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1713
+ "description": "",
1714
+ "target_delimiter": " ",
1715
+ "fewshot_delimiter": "\n\n",
1716
+ "num_fewshot": 0,
1717
+ "metric_list": [
1718
+ {
1719
+ "metric": "acc"
1720
+ }
1721
+ ],
1722
+ "output_type": "multiple_choice",
1723
+ "repeats": 1,
1724
+ "should_decontaminate": true,
1725
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1726
+ "metadata": {
1727
+ "version": 1.0
1728
+ }
1729
+ },
1730
+ "blimp_sentential_subject_island": {
1731
+ "task": "blimp_sentential_subject_island",
1732
+ "group": "blimp",
1733
+ "dataset_path": "blimp",
1734
+ "dataset_name": "sentential_subject_island",
1735
+ "validation_split": "train",
1736
+ "doc_to_text": "",
1737
+ "doc_to_target": 0,
1738
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1739
+ "description": "",
1740
+ "target_delimiter": " ",
1741
+ "fewshot_delimiter": "\n\n",
1742
+ "num_fewshot": 0,
1743
+ "metric_list": [
1744
+ {
1745
+ "metric": "acc"
1746
+ }
1747
+ ],
1748
+ "output_type": "multiple_choice",
1749
+ "repeats": 1,
1750
+ "should_decontaminate": true,
1751
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1752
+ "metadata": {
1753
+ "version": 1.0
1754
+ }
1755
+ },
1756
+ "blimp_superlative_quantifiers_1": {
1757
+ "task": "blimp_superlative_quantifiers_1",
1758
+ "group": "blimp",
1759
+ "dataset_path": "blimp",
1760
+ "dataset_name": "superlative_quantifiers_1",
1761
+ "validation_split": "train",
1762
+ "doc_to_text": "",
1763
+ "doc_to_target": 0,
1764
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1765
+ "description": "",
1766
+ "target_delimiter": " ",
1767
+ "fewshot_delimiter": "\n\n",
1768
+ "num_fewshot": 0,
1769
+ "metric_list": [
1770
+ {
1771
+ "metric": "acc"
1772
+ }
1773
+ ],
1774
+ "output_type": "multiple_choice",
1775
+ "repeats": 1,
1776
+ "should_decontaminate": true,
1777
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1778
+ "metadata": {
1779
+ "version": 1.0
1780
+ }
1781
+ },
1782
+ "blimp_superlative_quantifiers_2": {
1783
+ "task": "blimp_superlative_quantifiers_2",
1784
+ "group": "blimp",
1785
+ "dataset_path": "blimp",
1786
+ "dataset_name": "superlative_quantifiers_2",
1787
+ "validation_split": "train",
1788
+ "doc_to_text": "",
1789
+ "doc_to_target": 0,
1790
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1791
+ "description": "",
1792
+ "target_delimiter": " ",
1793
+ "fewshot_delimiter": "\n\n",
1794
+ "num_fewshot": 0,
1795
+ "metric_list": [
1796
+ {
1797
+ "metric": "acc"
1798
+ }
1799
+ ],
1800
+ "output_type": "multiple_choice",
1801
+ "repeats": 1,
1802
+ "should_decontaminate": true,
1803
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1804
+ "metadata": {
1805
+ "version": 1.0
1806
+ }
1807
+ },
1808
+ "blimp_tough_vs_raising_1": {
1809
+ "task": "blimp_tough_vs_raising_1",
1810
+ "group": "blimp",
1811
+ "dataset_path": "blimp",
1812
+ "dataset_name": "tough_vs_raising_1",
1813
+ "validation_split": "train",
1814
+ "doc_to_text": "",
1815
+ "doc_to_target": 0,
1816
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1817
+ "description": "",
1818
+ "target_delimiter": " ",
1819
+ "fewshot_delimiter": "\n\n",
1820
+ "num_fewshot": 0,
1821
+ "metric_list": [
1822
+ {
1823
+ "metric": "acc"
1824
+ }
1825
+ ],
1826
+ "output_type": "multiple_choice",
1827
+ "repeats": 1,
1828
+ "should_decontaminate": true,
1829
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1830
+ "metadata": {
1831
+ "version": 1.0
1832
+ }
1833
+ },
1834
+ "blimp_tough_vs_raising_2": {
1835
+ "task": "blimp_tough_vs_raising_2",
1836
+ "group": "blimp",
1837
+ "dataset_path": "blimp",
1838
+ "dataset_name": "tough_vs_raising_2",
1839
+ "validation_split": "train",
1840
+ "doc_to_text": "",
1841
+ "doc_to_target": 0,
1842
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1843
+ "description": "",
1844
+ "target_delimiter": " ",
1845
+ "fewshot_delimiter": "\n\n",
1846
+ "num_fewshot": 0,
1847
+ "metric_list": [
1848
+ {
1849
+ "metric": "acc"
1850
+ }
1851
+ ],
1852
+ "output_type": "multiple_choice",
1853
+ "repeats": 1,
1854
+ "should_decontaminate": true,
1855
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1856
+ "metadata": {
1857
+ "version": 1.0
1858
+ }
1859
+ },
1860
+ "blimp_transitive": {
1861
+ "task": "blimp_transitive",
1862
+ "group": "blimp",
1863
+ "dataset_path": "blimp",
1864
+ "dataset_name": "transitive",
1865
+ "validation_split": "train",
1866
+ "doc_to_text": "",
1867
+ "doc_to_target": 0,
1868
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1869
+ "description": "",
1870
+ "target_delimiter": " ",
1871
+ "fewshot_delimiter": "\n\n",
1872
+ "num_fewshot": 0,
1873
+ "metric_list": [
1874
+ {
1875
+ "metric": "acc"
1876
+ }
1877
+ ],
1878
+ "output_type": "multiple_choice",
1879
+ "repeats": 1,
1880
+ "should_decontaminate": true,
1881
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1882
+ "metadata": {
1883
+ "version": 1.0
1884
+ }
1885
+ },
1886
+ "blimp_wh_island": {
1887
+ "task": "blimp_wh_island",
1888
+ "group": "blimp",
1889
+ "dataset_path": "blimp",
1890
+ "dataset_name": "wh_island",
1891
+ "validation_split": "train",
1892
+ "doc_to_text": "",
1893
+ "doc_to_target": 0,
1894
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1895
+ "description": "",
1896
+ "target_delimiter": " ",
1897
+ "fewshot_delimiter": "\n\n",
1898
+ "num_fewshot": 0,
1899
+ "metric_list": [
1900
+ {
1901
+ "metric": "acc"
1902
+ }
1903
+ ],
1904
+ "output_type": "multiple_choice",
1905
+ "repeats": 1,
1906
+ "should_decontaminate": true,
1907
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1908
+ "metadata": {
1909
+ "version": 1.0
1910
+ }
1911
+ },
1912
+ "blimp_wh_questions_object_gap": {
1913
+ "task": "blimp_wh_questions_object_gap",
1914
+ "group": "blimp",
1915
+ "dataset_path": "blimp",
1916
+ "dataset_name": "wh_questions_object_gap",
1917
+ "validation_split": "train",
1918
+ "doc_to_text": "",
1919
+ "doc_to_target": 0,
1920
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1921
+ "description": "",
1922
+ "target_delimiter": " ",
1923
+ "fewshot_delimiter": "\n\n",
1924
+ "num_fewshot": 0,
1925
+ "metric_list": [
1926
+ {
1927
+ "metric": "acc"
1928
+ }
1929
+ ],
1930
+ "output_type": "multiple_choice",
1931
+ "repeats": 1,
1932
+ "should_decontaminate": true,
1933
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1934
+ "metadata": {
1935
+ "version": 1.0
1936
+ }
1937
+ },
1938
+ "blimp_wh_questions_subject_gap": {
1939
+ "task": "blimp_wh_questions_subject_gap",
1940
+ "group": "blimp",
1941
+ "dataset_path": "blimp",
1942
+ "dataset_name": "wh_questions_subject_gap",
1943
+ "validation_split": "train",
1944
+ "doc_to_text": "",
1945
+ "doc_to_target": 0,
1946
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1947
+ "description": "",
1948
+ "target_delimiter": " ",
1949
+ "fewshot_delimiter": "\n\n",
1950
+ "num_fewshot": 0,
1951
+ "metric_list": [
1952
+ {
1953
+ "metric": "acc"
1954
+ }
1955
+ ],
1956
+ "output_type": "multiple_choice",
1957
+ "repeats": 1,
1958
+ "should_decontaminate": true,
1959
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1960
+ "metadata": {
1961
+ "version": 1.0
1962
+ }
1963
+ },
1964
+ "blimp_wh_questions_subject_gap_long_distance": {
1965
+ "task": "blimp_wh_questions_subject_gap_long_distance",
1966
+ "group": "blimp",
1967
+ "dataset_path": "blimp",
1968
+ "dataset_name": "wh_questions_subject_gap_long_distance",
1969
+ "validation_split": "train",
1970
+ "doc_to_text": "",
1971
+ "doc_to_target": 0,
1972
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1973
+ "description": "",
1974
+ "target_delimiter": " ",
1975
+ "fewshot_delimiter": "\n\n",
1976
+ "num_fewshot": 0,
1977
+ "metric_list": [
1978
+ {
1979
+ "metric": "acc"
1980
+ }
1981
+ ],
1982
+ "output_type": "multiple_choice",
1983
+ "repeats": 1,
1984
+ "should_decontaminate": true,
1985
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1986
+ "metadata": {
1987
+ "version": 1.0
1988
+ }
1989
+ },
1990
+ "blimp_wh_vs_that_no_gap": {
1991
+ "task": "blimp_wh_vs_that_no_gap",
1992
+ "group": "blimp",
1993
+ "dataset_path": "blimp",
1994
+ "dataset_name": "wh_vs_that_no_gap",
1995
+ "validation_split": "train",
1996
+ "doc_to_text": "",
1997
+ "doc_to_target": 0,
1998
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1999
+ "description": "",
2000
+ "target_delimiter": " ",
2001
+ "fewshot_delimiter": "\n\n",
2002
+ "num_fewshot": 0,
2003
+ "metric_list": [
2004
+ {
2005
+ "metric": "acc"
2006
+ }
2007
+ ],
2008
+ "output_type": "multiple_choice",
2009
+ "repeats": 1,
2010
+ "should_decontaminate": true,
2011
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2012
+ "metadata": {
2013
+ "version": 1.0
2014
+ }
2015
+ },
2016
+ "blimp_wh_vs_that_no_gap_long_distance": {
2017
+ "task": "blimp_wh_vs_that_no_gap_long_distance",
2018
+ "group": "blimp",
2019
+ "dataset_path": "blimp",
2020
+ "dataset_name": "wh_vs_that_no_gap_long_distance",
2021
+ "validation_split": "train",
2022
+ "doc_to_text": "",
2023
+ "doc_to_target": 0,
2024
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2025
+ "description": "",
2026
+ "target_delimiter": " ",
2027
+ "fewshot_delimiter": "\n\n",
2028
+ "num_fewshot": 0,
2029
+ "metric_list": [
2030
+ {
2031
+ "metric": "acc"
2032
+ }
2033
+ ],
2034
+ "output_type": "multiple_choice",
2035
+ "repeats": 1,
2036
+ "should_decontaminate": true,
2037
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2038
+ "metadata": {
2039
+ "version": 1.0
2040
+ }
2041
+ },
2042
+ "blimp_wh_vs_that_with_gap": {
2043
+ "task": "blimp_wh_vs_that_with_gap",
2044
+ "group": "blimp",
2045
+ "dataset_path": "blimp",
2046
+ "dataset_name": "wh_vs_that_with_gap",
2047
+ "validation_split": "train",
2048
+ "doc_to_text": "",
2049
+ "doc_to_target": 0,
2050
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2051
+ "description": "",
2052
+ "target_delimiter": " ",
2053
+ "fewshot_delimiter": "\n\n",
2054
+ "num_fewshot": 0,
2055
+ "metric_list": [
2056
+ {
2057
+ "metric": "acc"
2058
+ }
2059
+ ],
2060
+ "output_type": "multiple_choice",
2061
+ "repeats": 1,
2062
+ "should_decontaminate": true,
2063
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2064
+ "metadata": {
2065
+ "version": 1.0
2066
+ }
2067
+ },
2068
+ "blimp_wh_vs_that_with_gap_long_distance": {
2069
+ "task": "blimp_wh_vs_that_with_gap_long_distance",
2070
+ "group": "blimp",
2071
+ "dataset_path": "blimp",
2072
+ "dataset_name": "wh_vs_that_with_gap_long_distance",
2073
+ "validation_split": "train",
2074
+ "doc_to_text": "",
2075
+ "doc_to_target": 0,
2076
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
2077
+ "description": "",
2078
+ "target_delimiter": " ",
2079
+ "fewshot_delimiter": "\n\n",
2080
+ "num_fewshot": 0,
2081
+ "metric_list": [
2082
+ {
2083
+ "metric": "acc"
2084
+ }
2085
+ ],
2086
+ "output_type": "multiple_choice",
2087
+ "repeats": 1,
2088
+ "should_decontaminate": true,
2089
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
2090
+ "metadata": {
2091
+ "version": 1.0
2092
+ }
2093
+ }
2094
+ },
2095
+ "versions": {
2096
+ "blimp": "N/A",
2097
+ "blimp_adjunct_island": 1.0,
2098
+ "blimp_anaphor_gender_agreement": 1.0,
2099
+ "blimp_anaphor_number_agreement": 1.0,
2100
+ "blimp_animate_subject_passive": 1.0,
2101
+ "blimp_animate_subject_trans": 1.0,
2102
+ "blimp_causative": 1.0,
2103
+ "blimp_complex_NP_island": 1.0,
2104
+ "blimp_coordinate_structure_constraint_complex_left_branch": 1.0,
2105
+ "blimp_coordinate_structure_constraint_object_extraction": 1.0,
2106
+ "blimp_determiner_noun_agreement_1": 1.0,
2107
+ "blimp_determiner_noun_agreement_2": 1.0,
2108
+ "blimp_determiner_noun_agreement_irregular_1": 1.0,
2109
+ "blimp_determiner_noun_agreement_irregular_2": 1.0,
2110
+ "blimp_determiner_noun_agreement_with_adj_2": 1.0,
2111
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": 1.0,
2112
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": 1.0,
2113
+ "blimp_determiner_noun_agreement_with_adjective_1": 1.0,
2114
+ "blimp_distractor_agreement_relational_noun": 1.0,
2115
+ "blimp_distractor_agreement_relative_clause": 1.0,
2116
+ "blimp_drop_argument": 1.0,
2117
+ "blimp_ellipsis_n_bar_1": 1.0,
2118
+ "blimp_ellipsis_n_bar_2": 1.0,
2119
+ "blimp_existential_there_object_raising": 1.0,
2120
+ "blimp_existential_there_quantifiers_1": 1.0,
2121
+ "blimp_existential_there_quantifiers_2": 1.0,
2122
+ "blimp_existential_there_subject_raising": 1.0,
2123
+ "blimp_expletive_it_object_raising": 1.0,
2124
+ "blimp_inchoative": 1.0,
2125
+ "blimp_intransitive": 1.0,
2126
+ "blimp_irregular_past_participle_adjectives": 1.0,
2127
+ "blimp_irregular_past_participle_verbs": 1.0,
2128
+ "blimp_irregular_plural_subject_verb_agreement_1": 1.0,
2129
+ "blimp_irregular_plural_subject_verb_agreement_2": 1.0,
2130
+ "blimp_left_branch_island_echo_question": 1.0,
2131
+ "blimp_left_branch_island_simple_question": 1.0,
2132
+ "blimp_matrix_question_npi_licensor_present": 1.0,
2133
+ "blimp_npi_present_1": 1.0,
2134
+ "blimp_npi_present_2": 1.0,
2135
+ "blimp_only_npi_licensor_present": 1.0,
2136
+ "blimp_only_npi_scope": 1.0,
2137
+ "blimp_passive_1": 1.0,
2138
+ "blimp_passive_2": 1.0,
2139
+ "blimp_principle_A_c_command": 1.0,
2140
+ "blimp_principle_A_case_1": 1.0,
2141
+ "blimp_principle_A_case_2": 1.0,
2142
+ "blimp_principle_A_domain_1": 1.0,
2143
+ "blimp_principle_A_domain_2": 1.0,
2144
+ "blimp_principle_A_domain_3": 1.0,
2145
+ "blimp_principle_A_reconstruction": 1.0,
2146
+ "blimp_regular_plural_subject_verb_agreement_1": 1.0,
2147
+ "blimp_regular_plural_subject_verb_agreement_2": 1.0,
2148
+ "blimp_sentential_negation_npi_licensor_present": 1.0,
2149
+ "blimp_sentential_negation_npi_scope": 1.0,
2150
+ "blimp_sentential_subject_island": 1.0,
2151
+ "blimp_superlative_quantifiers_1": 1.0,
2152
+ "blimp_superlative_quantifiers_2": 1.0,
2153
+ "blimp_tough_vs_raising_1": 1.0,
2154
+ "blimp_tough_vs_raising_2": 1.0,
2155
+ "blimp_transitive": 1.0,
2156
+ "blimp_wh_island": 1.0,
2157
+ "blimp_wh_questions_object_gap": 1.0,
2158
+ "blimp_wh_questions_subject_gap": 1.0,
2159
+ "blimp_wh_questions_subject_gap_long_distance": 1.0,
2160
+ "blimp_wh_vs_that_no_gap": 1.0,
2161
+ "blimp_wh_vs_that_no_gap_long_distance": 1.0,
2162
+ "blimp_wh_vs_that_with_gap": 1.0,
2163
+ "blimp_wh_vs_that_with_gap_long_distance": 1.0
2164
+ },
2165
+ "n-shot": {
2166
+ "blimp": 0,
2167
+ "blimp_adjunct_island": 0,
2168
+ "blimp_anaphor_gender_agreement": 0,
2169
+ "blimp_anaphor_number_agreement": 0,
2170
+ "blimp_animate_subject_passive": 0,
2171
+ "blimp_animate_subject_trans": 0,
2172
+ "blimp_causative": 0,
2173
+ "blimp_complex_NP_island": 0,
2174
+ "blimp_coordinate_structure_constraint_complex_left_branch": 0,
2175
+ "blimp_coordinate_structure_constraint_object_extraction": 0,
2176
+ "blimp_determiner_noun_agreement_1": 0,
2177
+ "blimp_determiner_noun_agreement_2": 0,
2178
+ "blimp_determiner_noun_agreement_irregular_1": 0,
2179
+ "blimp_determiner_noun_agreement_irregular_2": 0,
2180
+ "blimp_determiner_noun_agreement_with_adj_2": 0,
2181
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": 0,
2182
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": 0,
2183
+ "blimp_determiner_noun_agreement_with_adjective_1": 0,
2184
+ "blimp_distractor_agreement_relational_noun": 0,
2185
+ "blimp_distractor_agreement_relative_clause": 0,
2186
+ "blimp_drop_argument": 0,
2187
+ "blimp_ellipsis_n_bar_1": 0,
2188
+ "blimp_ellipsis_n_bar_2": 0,
2189
+ "blimp_existential_there_object_raising": 0,
2190
+ "blimp_existential_there_quantifiers_1": 0,
2191
+ "blimp_existential_there_quantifiers_2": 0,
2192
+ "blimp_existential_there_subject_raising": 0,
2193
+ "blimp_expletive_it_object_raising": 0,
2194
+ "blimp_inchoative": 0,
2195
+ "blimp_intransitive": 0,
2196
+ "blimp_irregular_past_participle_adjectives": 0,
2197
+ "blimp_irregular_past_participle_verbs": 0,
2198
+ "blimp_irregular_plural_subject_verb_agreement_1": 0,
2199
+ "blimp_irregular_plural_subject_verb_agreement_2": 0,
2200
+ "blimp_left_branch_island_echo_question": 0,
2201
+ "blimp_left_branch_island_simple_question": 0,
2202
+ "blimp_matrix_question_npi_licensor_present": 0,
2203
+ "blimp_npi_present_1": 0,
2204
+ "blimp_npi_present_2": 0,
2205
+ "blimp_only_npi_licensor_present": 0,
2206
+ "blimp_only_npi_scope": 0,
2207
+ "blimp_passive_1": 0,
2208
+ "blimp_passive_2": 0,
2209
+ "blimp_principle_A_c_command": 0,
2210
+ "blimp_principle_A_case_1": 0,
2211
+ "blimp_principle_A_case_2": 0,
2212
+ "blimp_principle_A_domain_1": 0,
2213
+ "blimp_principle_A_domain_2": 0,
2214
+ "blimp_principle_A_domain_3": 0,
2215
+ "blimp_principle_A_reconstruction": 0,
2216
+ "blimp_regular_plural_subject_verb_agreement_1": 0,
2217
+ "blimp_regular_plural_subject_verb_agreement_2": 0,
2218
+ "blimp_sentential_negation_npi_licensor_present": 0,
2219
+ "blimp_sentential_negation_npi_scope": 0,
2220
+ "blimp_sentential_subject_island": 0,
2221
+ "blimp_superlative_quantifiers_1": 0,
2222
+ "blimp_superlative_quantifiers_2": 0,
2223
+ "blimp_tough_vs_raising_1": 0,
2224
+ "blimp_tough_vs_raising_2": 0,
2225
+ "blimp_transitive": 0,
2226
+ "blimp_wh_island": 0,
2227
+ "blimp_wh_questions_object_gap": 0,
2228
+ "blimp_wh_questions_subject_gap": 0,
2229
+ "blimp_wh_questions_subject_gap_long_distance": 0,
2230
+ "blimp_wh_vs_that_no_gap": 0,
2231
+ "blimp_wh_vs_that_no_gap_long_distance": 0,
2232
+ "blimp_wh_vs_that_with_gap": 0,
2233
+ "blimp_wh_vs_that_with_gap_long_distance": 0
2234
+ },
2235
+ "config": {
2236
+ "model": "hf",
2237
+ "model_args": "pretrained=SmerkyG/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True",
2238
+ "batch_size": "auto",
2239
+ "batch_sizes": [
2240
+ 64
2241
+ ],
2242
+ "device": null,
2243
+ "use_cache": null,
2244
+ "limit": null,
2245
+ "bootstrap_iters": 100000,
2246
+ "gen_kwargs": null
2247
+ },
2248
+ "git_hash": "1ee41f7"
2249
+ }
lm-eval-output/SmerkyG/rwkv-5-world-1b5/blimp/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:89cbb54498164135945ff3ae30ba8b91824a5e591209160a6134abba241f273c
+ size 318042
lm-eval-output/SmerkyG/rwkv-5-world-1b5/cmmlu/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
The diff for this file is too large to render. See raw diff
lm-eval-output/SmerkyG/rwkv-5-world-1b5/cmmlu/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ce5b0b9ddb9bb2a2f58452a6384f7e7a0172502934d1dcc703a282fbc958f876
+ size 148126
lm-eval-output/SmerkyG/rwkv-5-world-1b5/copa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
@@ -0,0 +1,58 @@
+ {
+ "results": {
+ "copa": {
+ "acc,none": 0.76,
+ "acc_stderr,none": 0.04292346959909284,
+ "alias": "copa"
+ }
+ },
+ "configs": {
+ "copa": {
+ "task": "copa",
+ "group": [
+ "super-glue-lm-eval-v1"
+ ],
+ "dataset_path": "super_glue",
+ "dataset_name": "copa",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "def doc_to_text(doc):\n # Drop the period\n connector = {\n \"cause\": \"because\",\n \"effect\": \"therefore\",\n }[doc[\"question\"]]\n return doc[\"premise\"].strip()[:-1] + f\" {connector}\"\n",
+ "doc_to_target": "def doc_to_target(doc):\n correct_choice = doc[\"choice1\"] if doc[\"label\"] == 0 else doc[\"choice2\"]\n # Connect the sentences\n return \" \" + convert_choice(correct_choice)\n",
+ "doc_to_choice": "def doc_to_choice(doc):\n return [\" \" + convert_choice(doc[\"choice1\"]), \" \" + convert_choice(doc[\"choice2\"])]\n",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "copa": 1.0
+ },
+ "n-shot": {
+ "copa": 0
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=SmerkyG/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True",
+ "batch_size": "auto",
+ "batch_sizes": [
+ 64
+ ],
+ "device": null,
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null
+ },
+ "git_hash": "1ee41f7"
+ }
lm-eval-output/SmerkyG/rwkv-5-world-1b5/copa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b89275fc3520f76c9d199d3f7145a423401188faecfaf1e51cb9de291445fbf0
+ size 38853
lm-eval-output/SmerkyG/rwkv-5-world-1b5/glue/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
@@ -0,0 +1,374 @@
+ {
+ "results": {
+ "glue": {
+ "acc,none": 0.5410165555026203,
+ "acc_stderr,none": 0.012289708247379585,
+ "f1,none": 0.3991229231036883,
+ "f1_stderr,none": 0.00018823773677900912,
+ "mcc,none": 0.028777377059353095,
+ "mcc_stderr,none": 0.029557452442007595,
+ "alias": "glue"
+ },
+ "cola": {
+ "mcc,none": 0.028777377059353095,
+ "mcc_stderr,none": 0.029557452442007595,
+ "alias": " - cola"
+ },
+ "mnli": {
+ "acc,none": 0.3502801833927662,
+ "acc_stderr,none": 0.004815571260570184,
+ "alias": " - mnli"
+ },
+ "mnli_mismatch": {
+ "acc,none": 0.3463181448331977,
+ "acc_stderr,none": 0.004798682211884212,
+ "alias": " - mnli_mismatch"
+ },
+ "mrpc": {
+ "acc,none": 0.37254901960784315,
+ "acc_stderr,none": 0.02396538492671658,
+ "f1,none": 0.26011560693641617,
+ "f1_stderr,none": 0.03106858780787724,
+ "alias": " - mrpc"
+ },
+ "qnli": {
+ "acc,none": 0.5052169137836353,
+ "acc_stderr,none": 0.006765042284363289,
+ "alias": " - qnli"
+ },
+ "qqp": {
+ "acc,none": 0.6368290873114024,
+ "acc_stderr,none": 0.002391775841486003,
+ "f1,none": 0.4003267306514192,
+ "f1_stderr,none": 0.003952746364902292,
+ "alias": " - qqp"
+ },
+ "rte": {
+ "acc,none": 0.51985559566787,
+ "acc_stderr,none": 0.030072723167317184,
+ "alias": " - rte"
+ },
+ "sst2": {
+ "acc,none": 0.7568807339449541,
+ "acc_stderr,none": 0.01453497656207427,
+ "alias": " - sst2"
+ },
+ "wnli": {
+ "acc,none": 0.4647887323943662,
+ "acc_stderr,none": 0.0596130578497224,
+ "alias": " - wnli"
+ }
+ },
+ "groups": {
+ "glue": {
+ "acc,none": 0.5410165555026203,
+ "acc_stderr,none": 0.012289708247379585,
+ "f1,none": 0.3991229231036883,
+ "f1_stderr,none": 0.00018823773677900912,
+ "mcc,none": 0.028777377059353095,
+ "mcc_stderr,none": 0.029557452442007595,
+ "alias": "glue"
+ }
+ },
+ "configs": {
+ "cola": {
+ "task": "cola",
+ "group": "glue",
+ "dataset_path": "glue",
+ "dataset_name": "cola",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "{{sentence}}\nQuestion: Does this sentence make sense?\nAnswer:",
+ "doc_to_target": "label",
+ "doc_to_choice": [
+ "no",
+ "yes"
+ ],
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "mcc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "sentence",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "mnli": {
+ "task": "mnli",
+ "group": "glue",
+ "dataset_path": "glue",
+ "dataset_name": "mnli",
+ "training_split": "train",
+ "validation_split": "validation_matched",
+ "doc_to_text": "def doc_to_text(doc) -> str:\n return \"{}\\nQuestion: {} True, False or Neither?\\nAnswer:\".format(\n doc[\"premise\"],\n doc[\"hypothesis\"].strip()\n + (\"\" if doc[\"hypothesis\"].strip().endswith(\".\") else \".\"),\n )\n",
+ "doc_to_target": "label",
+ "doc_to_choice": [
+ "True",
+ "Neither",
+ "False"
+ ],
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "mnli_mismatch": {
+ "task": "mnli_mismatch",
+ "group": "glue",
+ "dataset_path": "glue",
+ "dataset_name": "mnli",
+ "training_split": "train",
+ "validation_split": "validation_mismatched",
+ "doc_to_text": "def doc_to_text(doc) -> str:\n return \"{}\\nQuestion: {} True, False or Neither?\\nAnswer:\".format(\n doc[\"premise\"],\n doc[\"hypothesis\"].strip()\n + (\"\" if doc[\"hypothesis\"].strip().endswith(\".\") else \".\"),\n )\n",
+ "doc_to_target": "label",
+ "doc_to_choice": [
+ "True",
+ "Neither",
+ "False"
+ ],
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "mrpc": {
+ "task": "mrpc",
+ "group": "glue",
+ "dataset_path": "glue",
+ "dataset_name": "mrpc",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "Sentence 1: {{sentence1}}\nSentence 2: {{sentence2}}\nQuestion: Do both sentences mean the same thing?\nAnswer:",
+ "doc_to_target": "label",
+ "doc_to_choice": [
+ "no",
+ "yes"
+ ],
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc"
+ },
+ {
+ "metric": "f1"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "qnli": {
+ "task": "qnli",
+ "group": "glue",
+ "dataset_path": "glue",
+ "dataset_name": "qnli",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "{{question}}\n{{sentence}}\nQuestion: Does this response answer the question?\nAnswer:",
+ "doc_to_target": "label",
+ "doc_to_choice": [
+ "yes",
+ "no"
+ ],
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "qqp": {
+ "task": "qqp",
+ "group": "glue",
+ "dataset_path": "glue",
+ "dataset_name": "qqp",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "\nSentence 1: {{question1}}\nSentence 2: {{question2}}\nAnswer:",
+ "doc_to_target": "label",
+ "doc_to_choice": [
+ "no",
+ "yes"
+ ],
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc"
+ },
+ {
+ "metric": "f1"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "rte": {
+ "task": "rte",
+ "group": "glue",
+ "dataset_path": "glue",
+ "dataset_name": "rte",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "{{sentence1}}\nQuestion: {{sentence2}} True or False?\nAnswer:",
+ "doc_to_target": "label",
+ "doc_to_choice": [
+ "True",
+ "False"
+ ],
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "sst2": {
+ "task": "sst2",
+ "group": "glue",
+ "dataset_path": "glue",
+ "dataset_name": "sst2",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "{{sentence}}\nQuestion: Is this sentence positive or negative?\nAnswer:",
+ "doc_to_target": "label",
+ "doc_to_choice": [
+ "negative",
+ "positive"
+ ],
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "wnli": {
+ "task": "wnli",
+ "group": "glue",
+ "dataset_path": "glue",
+ "dataset_name": "wnli",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "{{sentence1}}\nQuestion: {{sentence2}} True or False?\nAnswer:",
+ "doc_to_target": "label",
+ "doc_to_choice": [
+ "False",
+ "True"
+ ],
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 2.0
+ }
+ }
+ },
+ "versions": {
+ "cola": 1.0,
+ "glue": "N/A",
+ "mnli": 1.0,
+ "mnli_mismatch": 1.0,
+ "mrpc": 1.0,
+ "qnli": 1.0,
+ "qqp": 1.0,
+ "rte": 1.0,
+ "sst2": 1.0,
+ "wnli": 2.0
+ },
+ "n-shot": {
+ "cola": 0,
+ "glue": 0,
+ "mnli": 0,
+ "mnli_mismatch": 0,
+ "mrpc": 0,
+ "qnli": 0,
+ "qqp": 0,
+ "rte": 0,
+ "sst2": 0,
+ "wnli": 0
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=SmerkyG/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True",
+ "batch_size": "auto",
+ "batch_sizes": [
+ 64
+ ],
+ "device": null,
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null
+ },
+ "git_hash": "1ee41f7"
+ }
lm-eval-output/SmerkyG/rwkv-5-world-1b5/glue/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:97e96b14c2cb3ac4e0abc67fb944f31147a9fa20fb2aedaaeac5db3f0a20df4c
+ size 102917
lm-eval-output/SmerkyG/rwkv-5-world-1b5/hellaswag/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
@@ -0,0 +1,67 @@
+ {
+ "results": {
+ "hellaswag": {
+ "acc,none": 0.42471619199362676,
+ "acc_stderr,none": 0.004932896472460568,
+ "acc_norm,none": 0.5501892053375822,
+ "acc_norm_stderr,none": 0.004964579685712438,
+ "alias": "hellaswag"
+ }
+ },
+ "configs": {
+ "hellaswag": {
+ "task": "hellaswag",
+ "group": [
+ "multiple_choice"
+ ],
+ "dataset_path": "hellaswag",
+ "training_split": "train",
+ "validation_split": "validation",
+ "process_docs": "def process_docs(dataset: datasets.Dataset) -> datasets.Dataset:\n def _process_doc(doc):\n ctx = doc[\"ctx_a\"] + \" \" + doc[\"ctx_b\"].capitalize()\n out_doc = {\n \"query\": preprocess(doc[\"activity_label\"] + \": \" + ctx),\n \"choices\": [preprocess(ending) for ending in doc[\"endings\"]],\n \"gold\": int(doc[\"label\"]),\n }\n return out_doc\n\n return dataset.map(_process_doc)\n",
+ "doc_to_text": "{{query}}",
+ "doc_to_target": "{{label}}",
+ "doc_to_choice": "choices",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ },
+ {
+ "metric": "acc_norm",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "hellaswag": 1.0
+ },
+ "n-shot": {
+ "hellaswag": 0
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=SmerkyG/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True",
+ "batch_size": "auto",
+ "batch_sizes": [
+ 64
+ ],
+ "device": null,
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null
+ },
+ "git_hash": "1ee41f7"
+ }
lm-eval-output/SmerkyG/rwkv-5-world-1b5/hellaswag/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2a306a64419e9c542fa6abc1781910a4d4b282ddd0c1f6093aeb2e1c2b274b92
+ size 81828
lm-eval-output/SmerkyG/rwkv-5-world-1b5/lambada/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
@@ -0,0 +1,126 @@
+ {
+ "results": {
+ "lambada": {
+ "perplexity,none": 6.369187608169782,
+ "perplexity_stderr,none": 0.6794074695255675,
+ "acc,none": 0.6095478362119154,
+ "acc_stderr,none": 0.02462399103409058,
+ "alias": "lambada"
+ },
+ "lambada_openai": {
+ "perplexity,none": 5.0536798166390575,
+ "perplexity_stderr,none": 0.11842491248398582,
+ "acc,none": 0.6568988938482437,
+ "acc_stderr,none": 0.006614124982461028,
+ "alias": " - lambada_openai"
+ },
+ "lambada_standard": {
+ "perplexity,none": 7.684695399700504,
+ "perplexity_stderr,none": 0.20929842195468237,
+ "acc,none": 0.562196778575587,
+ "acc_stderr,none": 0.006911872616149982,
+ "alias": " - lambada_standard"
+ }
+ },
+ "groups": {
+ "lambada": {
+ "perplexity,none": 6.369187608169782,
+ "perplexity_stderr,none": 0.6794074695255675,
+ "acc,none": 0.6095478362119154,
+ "acc_stderr,none": 0.02462399103409058,
+ "alias": "lambada"
+ }
+ },
+ "configs": {
+ "lambada_openai": {
+ "task": "lambada_openai",
+ "group": [
+ "lambada"
+ ],
+ "dataset_path": "EleutherAI/lambada_openai",
+ "dataset_name": "default",
+ "test_split": "test",
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "perplexity",
+ "aggregation": "perplexity",
+ "higher_is_better": false
+ },
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "loglikelihood",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{text}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "lambada_standard": {
+ "task": "lambada_standard",
+ "group": [
+ "lambada"
+ ],
+ "dataset_path": "lambada",
+ "validation_split": "validation",
+ "test_split": "test",
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "perplexity",
+ "aggregation": "perplexity",
+ "higher_is_better": false
+ },
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "loglikelihood",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{text}}",
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "lambada": "N/A",
+ "lambada_openai": 1.0,
+ "lambada_standard": 1.0
+ },
+ "n-shot": {
+ "lambada": 0,
+ "lambada_openai": 0,
+ "lambada_standard": 0
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=SmerkyG/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True",
+ "batch_size": "auto",
+ "batch_sizes": [
+ 64
+ ],
+ "device": null,
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null
+ },
+ "git_hash": "1ee41f7"
+ }
lm-eval-output/SmerkyG/rwkv-5-world-1b5/lambada/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6a324eedb84b2a9b3e9d538e30db7e1aaae7e3e66d68e7de29d28627092c3b10
+ size 48681
lm-eval-output/SmerkyG/rwkv-5-world-1b5/lambada_multilingual/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
@@ -0,0 +1,252 @@
+ {
+ "results": {
+ "lambada_multilingual": {
+ "perplexity,none": 43.18680498264333,
+ "perplexity_stderr,none": 16.58118499444968,
+ "acc,none": 0.4484766155637493,
+ "acc_stderr,none": 0.0830249431644644,
+ "alias": "lambada_multilingual"
+ },
+ "lambada_openai_mt_de": {
+ "perplexity,none": 65.82972989107675,
+ "perplexity_stderr,none": 3.9571956126281833,
+ "acc,none": 0.35066951290510384,
+ "acc_stderr,none": 0.006648045374603887,
+ "alias": " - lambada_openai_mt_de"
+ },
+ "lambada_openai_mt_en": {
+ "perplexity,none": 5.056405351554518,
+ "perplexity_stderr,none": 0.11860916891457675,
+ "acc,none": 0.6567048321366195,
+ "acc_stderr,none": 0.00661501790443367,
+ "alias": " - lambada_openai_mt_en"
+ },
+ "lambada_openai_mt_es": {
+ "perplexity,none": 61.249035187327245,
+ "perplexity_stderr,none": 3.3251943349532094,
+ "acc,none": 0.37104599262565496,
+ "acc_stderr,none": 0.006730314981342215,
+ "alias": " - lambada_openai_mt_es"
+ },
+ "lambada_openai_mt_fr": {
+ "perplexity,none": 34.89400012412681,
+ "perplexity_stderr,none": 1.8764986780815518,
+ "acc,none": 0.44944692412187076,
+ "acc_stderr,none": 0.006930281504471643,
+ "alias": " - lambada_openai_mt_fr"
+ },
+ "lambada_openai_mt_it": {
+ "perplexity,none": 48.90485435913133,
+ "perplexity_stderr,none": 2.8348284694345787,
+ "acc,none": 0.4145158160294974,
+ "acc_stderr,none": 0.006863414211397148,
+ "alias": " - lambada_openai_mt_it"
+ }
+ },
+ "groups": {
+ "lambada_multilingual": {
+ "perplexity,none": 43.18680498264333,
+ "perplexity_stderr,none": 16.58118499444968,
+ "acc,none": 0.4484766155637493,
+ "acc_stderr,none": 0.0830249431644644,
+ "alias": "lambada_multilingual"
+ }
+ },
+ "configs": {
+ "lambada_openai_mt_de": {
+ "task": "lambada_openai_mt_de",
+ "group": [
+ "lambada_multilingual"
+ ],
+ "dataset_path": "EleutherAI/lambada_openai",
+ "dataset_name": "de",
+ "test_split": "test",
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "perplexity",
+ "aggregation": "perplexity",
+ "higher_is_better": false
+ },
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "loglikelihood",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{text}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "lambada_openai_mt_en": {
+ "task": "lambada_openai_mt_en",
+ "group": [
+ "lambada_multilingual"
+ ],
+ "dataset_path": "EleutherAI/lambada_openai",
+ "dataset_name": "en",
+ "test_split": "test",
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "perplexity",
+ "aggregation": "perplexity",
+ "higher_is_better": false
+ },
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "loglikelihood",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{text}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "lambada_openai_mt_es": {
+ "task": "lambada_openai_mt_es",
+ "group": [
+ "lambada_multilingual"
+ ],
+ "dataset_path": "EleutherAI/lambada_openai",
+ "dataset_name": "es",
+ "test_split": "test",
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "perplexity",
+ "aggregation": "perplexity",
+ "higher_is_better": false
+ },
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "loglikelihood",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{text}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "lambada_openai_mt_fr": {
+ "task": "lambada_openai_mt_fr",
+ "group": [
+ "lambada_multilingual"
+ ],
+ "dataset_path": "EleutherAI/lambada_openai",
+ "dataset_name": "fr",
+ "test_split": "test",
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "perplexity",
+ "aggregation": "perplexity",
+ "higher_is_better": false
+ },
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "loglikelihood",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{text}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "lambada_openai_mt_it": {
+ "task": "lambada_openai_mt_it",
+ "group": [
+ "lambada_multilingual"
+ ],
+ "dataset_path": "EleutherAI/lambada_openai",
+ "dataset_name": "it",
+ "test_split": "test",
+ "doc_to_text": "{{text.split(' ')[:-1]|join(' ')}}",
+ "doc_to_target": "{{' '+text.split(' ')[-1]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "perplexity",
+ "aggregation": "perplexity",
+ "higher_is_better": false
+ },
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "loglikelihood",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{text}}",
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "lambada_multilingual": "N/A",
+ "lambada_openai_mt_de": 1.0,
+ "lambada_openai_mt_en": 1.0,
+ "lambada_openai_mt_es": 1.0,
+ "lambada_openai_mt_fr": 1.0,
+ "lambada_openai_mt_it": 1.0
+ },
+ "n-shot": {
+ "lambada_multilingual": 0,
+ "lambada_openai_mt_de": 0,
+ "lambada_openai_mt_en": 0,
+ "lambada_openai_mt_es": 0,
+ "lambada_openai_mt_fr": 0,
+ "lambada_openai_mt_it": 0
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=SmerkyG/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True",
+ "batch_size": "auto",
+ "batch_sizes": [
+ 64
+ ],
+ "device": null,
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null
+ },
+ "git_hash": "1ee41f7"
+ }
lm-eval-output/SmerkyG/rwkv-5-world-1b5/lambada_multilingual/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:80c887527c7dfb35056731a2e6af994e317fbb91a7f06ef2646439d1d88fe944
+ size 60619
lm-eval-output/SmerkyG/rwkv-5-world-1b5/mmlu/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
@@ -0,0 +1,2594 @@
+ {
+ "results": {
+ "mmlu": {
+ "acc,none": 0.2525993448226748,
+ "acc_stderr,none": 0.04202282990456397,
+ "alias": "mmlu"
+ },
+ "mmlu_humanities": {
+ "alias": " - humanities",
+ "acc,none": 0.24017003188097769,
+ "acc_stderr,none": 0.02857393482131495
+ },
+ "mmlu_formal_logic": {
+ "alias": " - formal_logic",
+ "acc,none": 0.2857142857142857,
+ "acc_stderr,none": 0.040406101782088394
+ },
+ "mmlu_high_school_european_history": {
+ "alias": " - high_school_european_history",
+ "acc,none": 0.23030303030303031,
+ "acc_stderr,none": 0.03287666758603489
+ },
+ "mmlu_high_school_us_history": {
+ "alias": " - high_school_us_history",
+ "acc,none": 0.27941176470588236,
+ "acc_stderr,none": 0.031493281045079556
+ },
+ "mmlu_high_school_world_history": {
+ "alias": " - high_school_world_history",
+ "acc,none": 0.2489451476793249,
+ "acc_stderr,none": 0.028146970599422644
+ },
+ "mmlu_international_law": {
+ "alias": " - international_law",
+ "acc,none": 0.17355371900826447,
+ "acc_stderr,none": 0.0345727283691767
+ },
+ "mmlu_jurisprudence": {
+ "alias": " - jurisprudence",
+ "acc,none": 0.25,
+ "acc_stderr,none": 0.04186091791394607
+ },
+ "mmlu_logical_fallacies": {
+ "alias": " - logical_fallacies",
+ "acc,none": 0.25153374233128833,
+ "acc_stderr,none": 0.034089978868575295
+ },
+ "mmlu_moral_disputes": {
+ "alias": " - moral_disputes",
+ "acc,none": 0.21098265895953758,
+ "acc_stderr,none": 0.021966309947043124
+ },
+ "mmlu_moral_scenarios": {
+ "alias": " - moral_scenarios",
+ "acc,none": 0.2346368715083799,
+ "acc_stderr,none": 0.014173044098303679
+ },
+ "mmlu_philosophy": {
+ "alias": " - philosophy",
+ "acc,none": 0.2540192926045016,
+ "acc_stderr,none": 0.02472386150477169
+ },
+ "mmlu_prehistory": {
+ "alias": " - prehistory",
+ "acc,none": 0.2222222222222222,
+ "acc_stderr,none": 0.023132376234543346
+ },
+ "mmlu_professional_law": {
+ "alias": " - professional_law",
+ "acc,none": 0.24967405475880053,
+ "acc_stderr,none": 0.011054538377832327
+ },
+ "mmlu_world_religions": {
+ "alias": " - world_religions",
+ "acc,none": 0.19883040935672514,
+ "acc_stderr,none": 0.03061111655743253
+ },
+ "mmlu_other": {
+ "alias": " - other",
+ "acc,none": 0.25683939491470875,
+ "acc_stderr,none": 0.05743915320464653
+ },
+ "mmlu_business_ethics": {
+ "alias": " - business_ethics",
+ "acc,none": 0.34,
+ "acc_stderr,none": 0.04760952285695235
+ },
+ "mmlu_clinical_knowledge": {
+ "alias": " - clinical_knowledge",
+ "acc,none": 0.32075471698113206,
+ "acc_stderr,none": 0.028727502957880263
+ },
+ "mmlu_college_medicine": {
+ "alias": " - college_medicine",
+ "acc,none": 0.3236994219653179,
+ "acc_stderr,none": 0.03567603799639171
+ },
+ "mmlu_global_facts": {
+ "alias": " - global_facts",
+ "acc,none": 0.2,
+ "acc_stderr,none": 0.04020151261036845
+ },
+ "mmlu_human_aging": {
+ "alias": " - human_aging",
+ "acc,none": 0.16143497757847533,
+ "acc_stderr,none": 0.024693957899128472
+ },
+ "mmlu_management": {
+ "alias": " - management",
+ "acc,none": 0.39805825242718446,
+ "acc_stderr,none": 0.04846748253977239
+ },
+ "mmlu_marketing": {
+ "alias": " - marketing",
+ "acc,none": 0.2094017094017094,
+ "acc_stderr,none": 0.026655699653922754
+ },
+ "mmlu_medical_genetics": {
+ "alias": " - medical_genetics",
+ "acc,none": 0.32,
+ "acc_stderr,none": 0.04688261722621505
+ },
+ "mmlu_miscellaneous": {
+ "alias": " - miscellaneous",
+ "acc,none": 0.22349936143039592,
+ "acc_stderr,none": 0.014897235229450707
+ },
+ "mmlu_nutrition": {
+ "alias": " - nutrition",
+ "acc,none": 0.30718954248366015,
+ "acc_stderr,none": 0.026415601914388992
+ },
+ "mmlu_professional_accounting": {
+ "alias": " - professional_accounting",
+ "acc,none": 0.24822695035460993,
+ "acc_stderr,none": 0.025770015644290396
+ },
+ "mmlu_professional_medicine": {
+ "alias": " - professional_medicine",
+ "acc,none": 0.25735294117647056,
+ "acc_stderr,none": 0.026556519470041524
+ },
+ "mmlu_virology": {
+ "alias": " - virology",
+ "acc,none": 0.21686746987951808,
+ "acc_stderr,none": 0.03208284450356365
+ },
+ "mmlu_social_sciences": {
+ "alias": " - social_sciences",
+ "acc,none": 0.26454338641533964,
+ "acc_stderr,none": 0.034586953407146494
+ },
+ "mmlu_econometrics": {
+ "alias": " - econometrics",
+ "acc,none": 0.2719298245614035,
+ "acc_stderr,none": 0.04185774424022056
+ },
+ "mmlu_high_school_geography": {
+ "alias": " - high_school_geography",
+ "acc,none": 0.3333333333333333,
+ "acc_stderr,none": 0.03358618145732524
+ },
+ "mmlu_high_school_government_and_politics": {
+ "alias": " - high_school_government_and_politics",
+ "acc,none": 0.27461139896373055,
+ "acc_stderr,none": 0.032210245080411544
+ },
+ "mmlu_high_school_macroeconomics": {
+ "alias": " - high_school_macroeconomics",
+ "acc,none": 0.258974358974359,
+ "acc_stderr,none": 0.022211106810061665
+ },
+ "mmlu_high_school_microeconomics": {
+ "alias": " - high_school_microeconomics",
+ "acc,none": 0.2605042016806723,
+ "acc_stderr,none": 0.028510251512341937
+ },
+ "mmlu_high_school_psychology": {
+ "alias": " - high_school_psychology",
+ "acc,none": 0.27155963302752295,
+ "acc_stderr,none": 0.019069098363191445
+ },
+ "mmlu_human_sexuality": {
+ "alias": " - human_sexuality",
+ "acc,none": 0.21374045801526717,
+ "acc_stderr,none": 0.0359546161177469
+ },
+ "mmlu_professional_psychology": {
+ "alias": " - professional_psychology",
+ "acc,none": 0.24183006535947713,
+ "acc_stderr,none": 0.017322789207784326
+ },
+ "mmlu_public_relations": {
+ "alias": " - public_relations",
+ "acc,none": 0.24545454545454545,
+ "acc_stderr,none": 0.041220665028782834
+ },
+ "mmlu_security_studies": {
+ "alias": " - security_studies",
+ "acc,none": 0.2612244897959184,
+ "acc_stderr,none": 0.028123429335142787
+ },
+ "mmlu_sociology": {
+ "alias": " - sociology",
+ "acc,none": 0.3034825870646766,
+ "acc_stderr,none": 0.03251006816458618
+ },
+ "mmlu_us_foreign_policy": {
+ "alias": " - us_foreign_policy",
+ "acc,none": 0.25,
+ "acc_stderr,none": 0.04351941398892446
+ },
+ "mmlu_stem": {
+ "alias": " - stem",
+ "acc,none": 0.25531240088804313,
+ "acc_stderr,none": 0.04558330291190535
+ },
+ "mmlu_abstract_algebra": {
+ "alias": " - abstract_algebra",
+ "acc,none": 0.26,
+ "acc_stderr,none": 0.0440844002276808
+ },
+ "mmlu_anatomy": {
+ "alias": " - anatomy",
+ "acc,none": 0.22962962962962963,
+ "acc_stderr,none": 0.03633384414073463
+ },
+ "mmlu_astronomy": {
+ "alias": " - astronomy",
+ "acc,none": 0.24342105263157895,
+ "acc_stderr,none": 0.034923496688842384
+ },
+ "mmlu_college_biology": {
+ "alias": " - college_biology",
+ "acc,none": 0.2777777777777778,
+ "acc_stderr,none": 0.03745554791462457
+ },
+ "mmlu_college_chemistry": {
+ "alias": " - college_chemistry",
+ "acc,none": 0.34,
+ "acc_stderr,none": 0.047609522856952344
+ },
+ "mmlu_college_computer_science": {
+ "alias": " - college_computer_science",
+ "acc,none": 0.25,
+ "acc_stderr,none": 0.04351941398892446
+ },
+ "mmlu_college_mathematics": {
+ "alias": " - college_mathematics",
+ "acc,none": 0.23,
+ "acc_stderr,none": 0.042295258468165044
+ },
+ "mmlu_college_physics": {
+ "alias": " - college_physics",
+ "acc,none": 0.2647058823529412,
+ "acc_stderr,none": 0.04389869956808778
+ },
+ "mmlu_computer_security": {
+ "alias": " - computer_security",
+ "acc,none": 0.22,
+ "acc_stderr,none": 0.041633319989322674
+ },
+ "mmlu_conceptual_physics": {
+ "alias": " - conceptual_physics",
+ "acc,none": 0.18723404255319148,
+ "acc_stderr,none": 0.025501588341883607
+ },
+ "mmlu_electrical_engineering": {
+ "alias": " - electrical_engineering",
+ "acc,none": 0.23448275862068965,
+ "acc_stderr,none": 0.035306258743465914
+ },
+ "mmlu_elementary_mathematics": {
+ "alias": " - elementary_mathematics",
+ "acc,none": 0.2962962962962963,
+ "acc_stderr,none": 0.023517294335963276
+ },
+ "mmlu_high_school_biology": {
+ "alias": " - high_school_biology",
+ "acc,none": 0.2903225806451613,
+ "acc_stderr,none": 0.025822106119415895
+ },
+ "mmlu_high_school_chemistry": {
+ "alias": " - high_school_chemistry",
+ "acc,none": 0.22167487684729065,
+ "acc_stderr,none": 0.029225575892489614
+ },
+ "mmlu_high_school_computer_science": {
+ "alias": " - high_school_computer_science",
+ "acc,none": 0.3,
+ "acc_stderr,none": 0.046056618647183814
+ },
+ "mmlu_high_school_mathematics": {
+ "alias": " - high_school_mathematics",
+ "acc,none": 0.2518518518518518,
+ "acc_stderr,none": 0.02646611753895991
+ },
+ "mmlu_high_school_physics": {
+ "alias": " - high_school_physics",
+ "acc,none": 0.2582781456953642,
+ "acc_stderr,none": 0.035737053147634576
+ },
+ "mmlu_high_school_statistics": {
+ "alias": " - high_school_statistics",
+ "acc,none": 0.25925925925925924,
+ "acc_stderr,none": 0.029886910547626964
+ },
+ "mmlu_machine_learning": {
+ "alias": " - machine_learning",
+ "acc,none": 0.19642857142857142,
+ "acc_stderr,none": 0.03770970049347019
+ }
+ },
+ "groups": {
+ "mmlu": {
+ "acc,none": 0.2525993448226748,
+ "acc_stderr,none": 0.04202282990456397,
+ "alias": "mmlu"
+ },
+ "mmlu_humanities": {
+ "alias": " - humanities",
+ "acc,none": 0.24017003188097769,
+ "acc_stderr,none": 0.02857393482131495
+ },
+ "mmlu_other": {
+ "alias": " - other",
+ "acc,none": 0.25683939491470875,
+ "acc_stderr,none": 0.05743915320464653
+ },
+ "mmlu_social_sciences": {
+ "alias": " - social_sciences",
+ "acc,none": 0.26454338641533964,
+ "acc_stderr,none": 0.034586953407146494
+ },
+ "mmlu_stem": {
+ "alias": " - stem",
+ "acc,none": 0.25531240088804313,
+ "acc_stderr,none": 0.04558330291190535
+ }
+ },
+ "configs": {
+ "mmlu_abstract_algebra": {
+ "task": "mmlu_abstract_algebra",
+ "task_alias": "abstract_algebra",
+ "group": "mmlu_stem",
+ "group_alias": "stem",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "abstract_algebra",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about abstract algebra.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_anatomy": {
+ "task": "mmlu_anatomy",
+ "task_alias": "anatomy",
+ "group": "mmlu_stem",
+ "group_alias": "stem",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "anatomy",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about anatomy.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_astronomy": {
+ "task": "mmlu_astronomy",
+ "task_alias": "astronomy",
+ "group": "mmlu_stem",
+ "group_alias": "stem",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "astronomy",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about astronomy.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_business_ethics": {
+ "task": "mmlu_business_ethics",
+ "task_alias": "business_ethics",
+ "group": "mmlu_other",
+ "group_alias": "other",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "business_ethics",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about business ethics.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_clinical_knowledge": {
+ "task": "mmlu_clinical_knowledge",
+ "task_alias": "clinical_knowledge",
+ "group": "mmlu_other",
+ "group_alias": "other",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "clinical_knowledge",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about clinical knowledge.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_college_biology": {
+ "task": "mmlu_college_biology",
+ "task_alias": "college_biology",
+ "group": "mmlu_stem",
+ "group_alias": "stem",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "college_biology",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about college biology.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_college_chemistry": {
+ "task": "mmlu_college_chemistry",
+ "task_alias": "college_chemistry",
+ "group": "mmlu_stem",
+ "group_alias": "stem",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "college_chemistry",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about college chemistry.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_college_computer_science": {
+ "task": "mmlu_college_computer_science",
+ "task_alias": "college_computer_science",
+ "group": "mmlu_stem",
+ "group_alias": "stem",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "college_computer_science",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about college computer science.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_college_mathematics": {
+ "task": "mmlu_college_mathematics",
+ "task_alias": "college_mathematics",
+ "group": "mmlu_stem",
+ "group_alias": "stem",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "college_mathematics",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about college mathematics.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_college_medicine": {
+ "task": "mmlu_college_medicine",
+ "task_alias": "college_medicine",
+ "group": "mmlu_other",
+ "group_alias": "other",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "college_medicine",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about college medicine.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_college_physics": {
+ "task": "mmlu_college_physics",
+ "task_alias": "college_physics",
+ "group": "mmlu_stem",
+ "group_alias": "stem",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "college_physics",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about college physics.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_computer_security": {
+ "task": "mmlu_computer_security",
+ "task_alias": "computer_security",
+ "group": "mmlu_stem",
+ "group_alias": "stem",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "computer_security",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about computer security.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_conceptual_physics": {
+ "task": "mmlu_conceptual_physics",
+ "task_alias": "conceptual_physics",
+ "group": "mmlu_stem",
+ "group_alias": "stem",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "conceptual_physics",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about conceptual physics.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_econometrics": {
+ "task": "mmlu_econometrics",
+ "task_alias": "econometrics",
+ "group": "mmlu_social_sciences",
+ "group_alias": "social_sciences",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "econometrics",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about econometrics.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_electrical_engineering": {
+ "task": "mmlu_electrical_engineering",
+ "task_alias": "electrical_engineering",
+ "group": "mmlu_stem",
+ "group_alias": "stem",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "electrical_engineering",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about electrical engineering.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_elementary_mathematics": {
+ "task": "mmlu_elementary_mathematics",
+ "task_alias": "elementary_mathematics",
+ "group": "mmlu_stem",
+ "group_alias": "stem",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "elementary_mathematics",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about elementary mathematics.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_formal_logic": {
+ "task": "mmlu_formal_logic",
+ "task_alias": "formal_logic",
+ "group": "mmlu_humanities",
+ "group_alias": "humanities",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "formal_logic",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about formal logic.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_global_facts": {
+ "task": "mmlu_global_facts",
+ "task_alias": "global_facts",
+ "group": "mmlu_other",
+ "group_alias": "other",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "global_facts",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about global facts.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_high_school_biology": {
+ "task": "mmlu_high_school_biology",
+ "task_alias": "high_school_biology",
+ "group": "mmlu_stem",
+ "group_alias": "stem",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "high_school_biology",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about high school biology.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_high_school_chemistry": {
+ "task": "mmlu_high_school_chemistry",
+ "task_alias": "high_school_chemistry",
+ "group": "mmlu_stem",
+ "group_alias": "stem",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "high_school_chemistry",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about high school chemistry.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_high_school_computer_science": {
+ "task": "mmlu_high_school_computer_science",
+ "task_alias": "high_school_computer_science",
+ "group": "mmlu_stem",
+ "group_alias": "stem",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "high_school_computer_science",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about high school computer science.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_high_school_european_history": {
+ "task": "mmlu_high_school_european_history",
+ "task_alias": "high_school_european_history",
+ "group": "mmlu_humanities",
+ "group_alias": "humanities",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "high_school_european_history",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about high school european history.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_high_school_geography": {
+ "task": "mmlu_high_school_geography",
+ "task_alias": "high_school_geography",
+ "group": "mmlu_social_sciences",
+ "group_alias": "social_sciences",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "high_school_geography",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about high school geography.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_high_school_government_and_politics": {
+ "task": "mmlu_high_school_government_and_politics",
+ "task_alias": "high_school_government_and_politics",
+ "group": "mmlu_social_sciences",
+ "group_alias": "social_sciences",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "high_school_government_and_politics",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about high school government and politics.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_high_school_macroeconomics": {
+ "task": "mmlu_high_school_macroeconomics",
+ "task_alias": "high_school_macroeconomics",
+ "group": "mmlu_social_sciences",
+ "group_alias": "social_sciences",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "high_school_macroeconomics",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about high school macroeconomics.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_high_school_mathematics": {
+ "task": "mmlu_high_school_mathematics",
+ "task_alias": "high_school_mathematics",
+ "group": "mmlu_stem",
+ "group_alias": "stem",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "high_school_mathematics",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about high school mathematics.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_high_school_microeconomics": {
+ "task": "mmlu_high_school_microeconomics",
+ "task_alias": "high_school_microeconomics",
+ "group": "mmlu_social_sciences",
+ "group_alias": "social_sciences",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "high_school_microeconomics",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about high school microeconomics.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_high_school_physics": {
+ "task": "mmlu_high_school_physics",
+ "task_alias": "high_school_physics",
+ "group": "mmlu_stem",
+ "group_alias": "stem",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "high_school_physics",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about high school physics.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_high_school_psychology": {
+ "task": "mmlu_high_school_psychology",
+ "task_alias": "high_school_psychology",
+ "group": "mmlu_social_sciences",
+ "group_alias": "social_sciences",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "high_school_psychology",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about high school psychology.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_high_school_statistics": {
+ "task": "mmlu_high_school_statistics",
+ "task_alias": "high_school_statistics",
+ "group": "mmlu_stem",
+ "group_alias": "stem",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "high_school_statistics",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about high school statistics.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_high_school_us_history": {
+ "task": "mmlu_high_school_us_history",
+ "task_alias": "high_school_us_history",
+ "group": "mmlu_humanities",
+ "group_alias": "humanities",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "high_school_us_history",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about high school us history.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_high_school_world_history": {
+ "task": "mmlu_high_school_world_history",
+ "task_alias": "high_school_world_history",
+ "group": "mmlu_humanities",
+ "group_alias": "humanities",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "high_school_world_history",
+ "test_split": "test",
+ "fewshot_split": "dev",
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
+ "doc_to_target": "answer",
+ "doc_to_choice": [
+ "A",
+ "B",
+ "C",
+ "D"
+ ],
+ "description": "The following are multiple choice questions (with answers) about high school world history.\n\n",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "fewshot_config": {
+ "sampler": "first_n"
+ },
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "mmlu_human_aging": {
+ "task": "mmlu_human_aging",
+ "task_alias": "human_aging",
+ "group": "mmlu_other",
+ "group_alias": "other",
+ "dataset_path": "hails/mmlu_no_train",
+ "dataset_name": "human_aging",
1533
+ "test_split": "test",
1534
+ "fewshot_split": "dev",
1535
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
1536
+ "doc_to_target": "answer",
1537
+ "doc_to_choice": [
1538
+ "A",
1539
+ "B",
1540
+ "C",
1541
+ "D"
1542
+ ],
1543
+ "description": "The following are multiple choice questions (with answers) about human aging.\n\n",
1544
+ "target_delimiter": " ",
1545
+ "fewshot_delimiter": "\n\n",
1546
+ "fewshot_config": {
1547
+ "sampler": "first_n"
1548
+ },
1549
+ "metric_list": [
1550
+ {
1551
+ "metric": "acc",
1552
+ "aggregation": "mean",
1553
+ "higher_is_better": true
1554
+ }
1555
+ ],
1556
+ "output_type": "multiple_choice",
1557
+ "repeats": 1,
1558
+ "should_decontaminate": false,
1559
+ "metadata": {
1560
+ "version": 0.0
1561
+ }
1562
+ },
1563
+ "mmlu_human_sexuality": {
1564
+ "task": "mmlu_human_sexuality",
1565
+ "task_alias": "human_sexuality",
1566
+ "group": "mmlu_social_sciences",
1567
+ "group_alias": "social_sciences",
1568
+ "dataset_path": "hails/mmlu_no_train",
1569
+ "dataset_name": "human_sexuality",
1570
+ "test_split": "test",
1571
+ "fewshot_split": "dev",
1572
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
1573
+ "doc_to_target": "answer",
1574
+ "doc_to_choice": [
1575
+ "A",
1576
+ "B",
1577
+ "C",
1578
+ "D"
1579
+ ],
1580
+ "description": "The following are multiple choice questions (with answers) about human sexuality.\n\n",
1581
+ "target_delimiter": " ",
1582
+ "fewshot_delimiter": "\n\n",
1583
+ "fewshot_config": {
1584
+ "sampler": "first_n"
1585
+ },
1586
+ "metric_list": [
1587
+ {
1588
+ "metric": "acc",
1589
+ "aggregation": "mean",
1590
+ "higher_is_better": true
1591
+ }
1592
+ ],
1593
+ "output_type": "multiple_choice",
1594
+ "repeats": 1,
1595
+ "should_decontaminate": false,
1596
+ "metadata": {
1597
+ "version": 0.0
1598
+ }
1599
+ },
1600
+ "mmlu_international_law": {
1601
+ "task": "mmlu_international_law",
1602
+ "task_alias": "international_law",
1603
+ "group": "mmlu_humanities",
1604
+ "group_alias": "humanities",
1605
+ "dataset_path": "hails/mmlu_no_train",
1606
+ "dataset_name": "international_law",
1607
+ "test_split": "test",
1608
+ "fewshot_split": "dev",
1609
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
1610
+ "doc_to_target": "answer",
1611
+ "doc_to_choice": [
1612
+ "A",
1613
+ "B",
1614
+ "C",
1615
+ "D"
1616
+ ],
1617
+ "description": "The following are multiple choice questions (with answers) about international law.\n\n",
1618
+ "target_delimiter": " ",
1619
+ "fewshot_delimiter": "\n\n",
1620
+ "fewshot_config": {
1621
+ "sampler": "first_n"
1622
+ },
1623
+ "metric_list": [
1624
+ {
1625
+ "metric": "acc",
1626
+ "aggregation": "mean",
1627
+ "higher_is_better": true
1628
+ }
1629
+ ],
1630
+ "output_type": "multiple_choice",
1631
+ "repeats": 1,
1632
+ "should_decontaminate": false,
1633
+ "metadata": {
1634
+ "version": 0.0
1635
+ }
1636
+ },
1637
+ "mmlu_jurisprudence": {
1638
+ "task": "mmlu_jurisprudence",
1639
+ "task_alias": "jurisprudence",
1640
+ "group": "mmlu_humanities",
1641
+ "group_alias": "humanities",
1642
+ "dataset_path": "hails/mmlu_no_train",
1643
+ "dataset_name": "jurisprudence",
1644
+ "test_split": "test",
1645
+ "fewshot_split": "dev",
1646
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
1647
+ "doc_to_target": "answer",
1648
+ "doc_to_choice": [
1649
+ "A",
1650
+ "B",
1651
+ "C",
1652
+ "D"
1653
+ ],
1654
+ "description": "The following are multiple choice questions (with answers) about jurisprudence.\n\n",
1655
+ "target_delimiter": " ",
1656
+ "fewshot_delimiter": "\n\n",
1657
+ "fewshot_config": {
1658
+ "sampler": "first_n"
1659
+ },
1660
+ "metric_list": [
1661
+ {
1662
+ "metric": "acc",
1663
+ "aggregation": "mean",
1664
+ "higher_is_better": true
1665
+ }
1666
+ ],
1667
+ "output_type": "multiple_choice",
1668
+ "repeats": 1,
1669
+ "should_decontaminate": false,
1670
+ "metadata": {
1671
+ "version": 0.0
1672
+ }
1673
+ },
1674
+ "mmlu_logical_fallacies": {
1675
+ "task": "mmlu_logical_fallacies",
1676
+ "task_alias": "logical_fallacies",
1677
+ "group": "mmlu_humanities",
1678
+ "group_alias": "humanities",
1679
+ "dataset_path": "hails/mmlu_no_train",
1680
+ "dataset_name": "logical_fallacies",
1681
+ "test_split": "test",
1682
+ "fewshot_split": "dev",
1683
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
1684
+ "doc_to_target": "answer",
1685
+ "doc_to_choice": [
1686
+ "A",
1687
+ "B",
1688
+ "C",
1689
+ "D"
1690
+ ],
1691
+ "description": "The following are multiple choice questions (with answers) about logical fallacies.\n\n",
1692
+ "target_delimiter": " ",
1693
+ "fewshot_delimiter": "\n\n",
1694
+ "fewshot_config": {
1695
+ "sampler": "first_n"
1696
+ },
1697
+ "metric_list": [
1698
+ {
1699
+ "metric": "acc",
1700
+ "aggregation": "mean",
1701
+ "higher_is_better": true
1702
+ }
1703
+ ],
1704
+ "output_type": "multiple_choice",
1705
+ "repeats": 1,
1706
+ "should_decontaminate": false,
1707
+ "metadata": {
1708
+ "version": 0.0
1709
+ }
1710
+ },
1711
+ "mmlu_machine_learning": {
1712
+ "task": "mmlu_machine_learning",
1713
+ "task_alias": "machine_learning",
1714
+ "group": "mmlu_stem",
1715
+ "group_alias": "stem",
1716
+ "dataset_path": "hails/mmlu_no_train",
1717
+ "dataset_name": "machine_learning",
1718
+ "test_split": "test",
1719
+ "fewshot_split": "dev",
1720
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
1721
+ "doc_to_target": "answer",
1722
+ "doc_to_choice": [
1723
+ "A",
1724
+ "B",
1725
+ "C",
1726
+ "D"
1727
+ ],
1728
+ "description": "The following are multiple choice questions (with answers) about machine learning.\n\n",
1729
+ "target_delimiter": " ",
1730
+ "fewshot_delimiter": "\n\n",
1731
+ "fewshot_config": {
1732
+ "sampler": "first_n"
1733
+ },
1734
+ "metric_list": [
1735
+ {
1736
+ "metric": "acc",
1737
+ "aggregation": "mean",
1738
+ "higher_is_better": true
1739
+ }
1740
+ ],
1741
+ "output_type": "multiple_choice",
1742
+ "repeats": 1,
1743
+ "should_decontaminate": false,
1744
+ "metadata": {
1745
+ "version": 0.0
1746
+ }
1747
+ },
1748
+ "mmlu_management": {
1749
+ "task": "mmlu_management",
1750
+ "task_alias": "management",
1751
+ "group": "mmlu_other",
1752
+ "group_alias": "other",
1753
+ "dataset_path": "hails/mmlu_no_train",
1754
+ "dataset_name": "management",
1755
+ "test_split": "test",
1756
+ "fewshot_split": "dev",
1757
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
1758
+ "doc_to_target": "answer",
1759
+ "doc_to_choice": [
1760
+ "A",
1761
+ "B",
1762
+ "C",
1763
+ "D"
1764
+ ],
1765
+ "description": "The following are multiple choice questions (with answers) about management.\n\n",
1766
+ "target_delimiter": " ",
1767
+ "fewshot_delimiter": "\n\n",
1768
+ "fewshot_config": {
1769
+ "sampler": "first_n"
1770
+ },
1771
+ "metric_list": [
1772
+ {
1773
+ "metric": "acc",
1774
+ "aggregation": "mean",
1775
+ "higher_is_better": true
1776
+ }
1777
+ ],
1778
+ "output_type": "multiple_choice",
1779
+ "repeats": 1,
1780
+ "should_decontaminate": false,
1781
+ "metadata": {
1782
+ "version": 0.0
1783
+ }
1784
+ },
1785
+ "mmlu_marketing": {
1786
+ "task": "mmlu_marketing",
1787
+ "task_alias": "marketing",
1788
+ "group": "mmlu_other",
1789
+ "group_alias": "other",
1790
+ "dataset_path": "hails/mmlu_no_train",
1791
+ "dataset_name": "marketing",
1792
+ "test_split": "test",
1793
+ "fewshot_split": "dev",
1794
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
1795
+ "doc_to_target": "answer",
1796
+ "doc_to_choice": [
1797
+ "A",
1798
+ "B",
1799
+ "C",
1800
+ "D"
1801
+ ],
1802
+ "description": "The following are multiple choice questions (with answers) about marketing.\n\n",
1803
+ "target_delimiter": " ",
1804
+ "fewshot_delimiter": "\n\n",
1805
+ "fewshot_config": {
1806
+ "sampler": "first_n"
1807
+ },
1808
+ "metric_list": [
1809
+ {
1810
+ "metric": "acc",
1811
+ "aggregation": "mean",
1812
+ "higher_is_better": true
1813
+ }
1814
+ ],
1815
+ "output_type": "multiple_choice",
1816
+ "repeats": 1,
1817
+ "should_decontaminate": false,
1818
+ "metadata": {
1819
+ "version": 0.0
1820
+ }
1821
+ },
1822
+ "mmlu_medical_genetics": {
1823
+ "task": "mmlu_medical_genetics",
1824
+ "task_alias": "medical_genetics",
1825
+ "group": "mmlu_other",
1826
+ "group_alias": "other",
1827
+ "dataset_path": "hails/mmlu_no_train",
1828
+ "dataset_name": "medical_genetics",
1829
+ "test_split": "test",
1830
+ "fewshot_split": "dev",
1831
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
1832
+ "doc_to_target": "answer",
1833
+ "doc_to_choice": [
1834
+ "A",
1835
+ "B",
1836
+ "C",
1837
+ "D"
1838
+ ],
1839
+ "description": "The following are multiple choice questions (with answers) about medical genetics.\n\n",
1840
+ "target_delimiter": " ",
1841
+ "fewshot_delimiter": "\n\n",
1842
+ "fewshot_config": {
1843
+ "sampler": "first_n"
1844
+ },
1845
+ "metric_list": [
1846
+ {
1847
+ "metric": "acc",
1848
+ "aggregation": "mean",
1849
+ "higher_is_better": true
1850
+ }
1851
+ ],
1852
+ "output_type": "multiple_choice",
1853
+ "repeats": 1,
1854
+ "should_decontaminate": false,
1855
+ "metadata": {
1856
+ "version": 0.0
1857
+ }
1858
+ },
1859
+ "mmlu_miscellaneous": {
1860
+ "task": "mmlu_miscellaneous",
1861
+ "task_alias": "miscellaneous",
1862
+ "group": "mmlu_other",
1863
+ "group_alias": "other",
1864
+ "dataset_path": "hails/mmlu_no_train",
1865
+ "dataset_name": "miscellaneous",
1866
+ "test_split": "test",
1867
+ "fewshot_split": "dev",
1868
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
1869
+ "doc_to_target": "answer",
1870
+ "doc_to_choice": [
1871
+ "A",
1872
+ "B",
1873
+ "C",
1874
+ "D"
1875
+ ],
1876
+ "description": "The following are multiple choice questions (with answers) about miscellaneous.\n\n",
1877
+ "target_delimiter": " ",
1878
+ "fewshot_delimiter": "\n\n",
1879
+ "fewshot_config": {
1880
+ "sampler": "first_n"
1881
+ },
1882
+ "metric_list": [
1883
+ {
1884
+ "metric": "acc",
1885
+ "aggregation": "mean",
1886
+ "higher_is_better": true
1887
+ }
1888
+ ],
1889
+ "output_type": "multiple_choice",
1890
+ "repeats": 1,
1891
+ "should_decontaminate": false,
1892
+ "metadata": {
1893
+ "version": 0.0
1894
+ }
1895
+ },
1896
+ "mmlu_moral_disputes": {
1897
+ "task": "mmlu_moral_disputes",
1898
+ "task_alias": "moral_disputes",
1899
+ "group": "mmlu_humanities",
1900
+ "group_alias": "humanities",
1901
+ "dataset_path": "hails/mmlu_no_train",
1902
+ "dataset_name": "moral_disputes",
1903
+ "test_split": "test",
1904
+ "fewshot_split": "dev",
1905
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
1906
+ "doc_to_target": "answer",
1907
+ "doc_to_choice": [
1908
+ "A",
1909
+ "B",
1910
+ "C",
1911
+ "D"
1912
+ ],
1913
+ "description": "The following are multiple choice questions (with answers) about moral disputes.\n\n",
1914
+ "target_delimiter": " ",
1915
+ "fewshot_delimiter": "\n\n",
1916
+ "fewshot_config": {
1917
+ "sampler": "first_n"
1918
+ },
1919
+ "metric_list": [
1920
+ {
1921
+ "metric": "acc",
1922
+ "aggregation": "mean",
1923
+ "higher_is_better": true
1924
+ }
1925
+ ],
1926
+ "output_type": "multiple_choice",
1927
+ "repeats": 1,
1928
+ "should_decontaminate": false,
1929
+ "metadata": {
1930
+ "version": 0.0
1931
+ }
1932
+ },
1933
+ "mmlu_moral_scenarios": {
1934
+ "task": "mmlu_moral_scenarios",
1935
+ "task_alias": "moral_scenarios",
1936
+ "group": "mmlu_humanities",
1937
+ "group_alias": "humanities",
1938
+ "dataset_path": "hails/mmlu_no_train",
1939
+ "dataset_name": "moral_scenarios",
1940
+ "test_split": "test",
1941
+ "fewshot_split": "dev",
1942
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
1943
+ "doc_to_target": "answer",
1944
+ "doc_to_choice": [
1945
+ "A",
1946
+ "B",
1947
+ "C",
1948
+ "D"
1949
+ ],
1950
+ "description": "The following are multiple choice questions (with answers) about moral scenarios.\n\n",
1951
+ "target_delimiter": " ",
1952
+ "fewshot_delimiter": "\n\n",
1953
+ "fewshot_config": {
1954
+ "sampler": "first_n"
1955
+ },
1956
+ "metric_list": [
1957
+ {
1958
+ "metric": "acc",
1959
+ "aggregation": "mean",
1960
+ "higher_is_better": true
1961
+ }
1962
+ ],
1963
+ "output_type": "multiple_choice",
1964
+ "repeats": 1,
1965
+ "should_decontaminate": false,
1966
+ "metadata": {
1967
+ "version": 0.0
1968
+ }
1969
+ },
1970
+ "mmlu_nutrition": {
1971
+ "task": "mmlu_nutrition",
1972
+ "task_alias": "nutrition",
1973
+ "group": "mmlu_other",
1974
+ "group_alias": "other",
1975
+ "dataset_path": "hails/mmlu_no_train",
1976
+ "dataset_name": "nutrition",
1977
+ "test_split": "test",
1978
+ "fewshot_split": "dev",
1979
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
1980
+ "doc_to_target": "answer",
1981
+ "doc_to_choice": [
1982
+ "A",
1983
+ "B",
1984
+ "C",
1985
+ "D"
1986
+ ],
1987
+ "description": "The following are multiple choice questions (with answers) about nutrition.\n\n",
1988
+ "target_delimiter": " ",
1989
+ "fewshot_delimiter": "\n\n",
1990
+ "fewshot_config": {
1991
+ "sampler": "first_n"
1992
+ },
1993
+ "metric_list": [
1994
+ {
1995
+ "metric": "acc",
1996
+ "aggregation": "mean",
1997
+ "higher_is_better": true
1998
+ }
1999
+ ],
2000
+ "output_type": "multiple_choice",
2001
+ "repeats": 1,
2002
+ "should_decontaminate": false,
2003
+ "metadata": {
2004
+ "version": 0.0
2005
+ }
2006
+ },
2007
+ "mmlu_philosophy": {
2008
+ "task": "mmlu_philosophy",
2009
+ "task_alias": "philosophy",
2010
+ "group": "mmlu_humanities",
2011
+ "group_alias": "humanities",
2012
+ "dataset_path": "hails/mmlu_no_train",
2013
+ "dataset_name": "philosophy",
2014
+ "test_split": "test",
2015
+ "fewshot_split": "dev",
2016
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
2017
+ "doc_to_target": "answer",
2018
+ "doc_to_choice": [
2019
+ "A",
2020
+ "B",
2021
+ "C",
2022
+ "D"
2023
+ ],
2024
+ "description": "The following are multiple choice questions (with answers) about philosophy.\n\n",
2025
+ "target_delimiter": " ",
2026
+ "fewshot_delimiter": "\n\n",
2027
+ "fewshot_config": {
2028
+ "sampler": "first_n"
2029
+ },
2030
+ "metric_list": [
2031
+ {
2032
+ "metric": "acc",
2033
+ "aggregation": "mean",
2034
+ "higher_is_better": true
2035
+ }
2036
+ ],
2037
+ "output_type": "multiple_choice",
2038
+ "repeats": 1,
2039
+ "should_decontaminate": false,
2040
+ "metadata": {
2041
+ "version": 0.0
2042
+ }
2043
+ },
2044
+ "mmlu_prehistory": {
2045
+ "task": "mmlu_prehistory",
2046
+ "task_alias": "prehistory",
2047
+ "group": "mmlu_humanities",
2048
+ "group_alias": "humanities",
2049
+ "dataset_path": "hails/mmlu_no_train",
2050
+ "dataset_name": "prehistory",
2051
+ "test_split": "test",
2052
+ "fewshot_split": "dev",
2053
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
2054
+ "doc_to_target": "answer",
2055
+ "doc_to_choice": [
2056
+ "A",
2057
+ "B",
2058
+ "C",
2059
+ "D"
2060
+ ],
2061
+ "description": "The following are multiple choice questions (with answers) about prehistory.\n\n",
2062
+ "target_delimiter": " ",
2063
+ "fewshot_delimiter": "\n\n",
2064
+ "fewshot_config": {
2065
+ "sampler": "first_n"
2066
+ },
2067
+ "metric_list": [
2068
+ {
2069
+ "metric": "acc",
2070
+ "aggregation": "mean",
2071
+ "higher_is_better": true
2072
+ }
2073
+ ],
2074
+ "output_type": "multiple_choice",
2075
+ "repeats": 1,
2076
+ "should_decontaminate": false,
2077
+ "metadata": {
2078
+ "version": 0.0
2079
+ }
2080
+ },
2081
+ "mmlu_professional_accounting": {
2082
+ "task": "mmlu_professional_accounting",
2083
+ "task_alias": "professional_accounting",
2084
+ "group": "mmlu_other",
2085
+ "group_alias": "other",
2086
+ "dataset_path": "hails/mmlu_no_train",
2087
+ "dataset_name": "professional_accounting",
2088
+ "test_split": "test",
2089
+ "fewshot_split": "dev",
2090
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
2091
+ "doc_to_target": "answer",
2092
+ "doc_to_choice": [
2093
+ "A",
2094
+ "B",
2095
+ "C",
2096
+ "D"
2097
+ ],
2098
+ "description": "The following are multiple choice questions (with answers) about professional accounting.\n\n",
2099
+ "target_delimiter": " ",
2100
+ "fewshot_delimiter": "\n\n",
2101
+ "fewshot_config": {
2102
+ "sampler": "first_n"
2103
+ },
2104
+ "metric_list": [
2105
+ {
2106
+ "metric": "acc",
2107
+ "aggregation": "mean",
2108
+ "higher_is_better": true
2109
+ }
2110
+ ],
2111
+ "output_type": "multiple_choice",
2112
+ "repeats": 1,
2113
+ "should_decontaminate": false,
2114
+ "metadata": {
2115
+ "version": 0.0
2116
+ }
2117
+ },
2118
+ "mmlu_professional_law": {
2119
+ "task": "mmlu_professional_law",
2120
+ "task_alias": "professional_law",
2121
+ "group": "mmlu_humanities",
2122
+ "group_alias": "humanities",
2123
+ "dataset_path": "hails/mmlu_no_train",
2124
+ "dataset_name": "professional_law",
2125
+ "test_split": "test",
2126
+ "fewshot_split": "dev",
2127
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
2128
+ "doc_to_target": "answer",
2129
+ "doc_to_choice": [
2130
+ "A",
2131
+ "B",
2132
+ "C",
2133
+ "D"
2134
+ ],
2135
+ "description": "The following are multiple choice questions (with answers) about professional law.\n\n",
2136
+ "target_delimiter": " ",
2137
+ "fewshot_delimiter": "\n\n",
2138
+ "fewshot_config": {
2139
+ "sampler": "first_n"
2140
+ },
2141
+ "metric_list": [
2142
+ {
2143
+ "metric": "acc",
2144
+ "aggregation": "mean",
2145
+ "higher_is_better": true
2146
+ }
2147
+ ],
2148
+ "output_type": "multiple_choice",
2149
+ "repeats": 1,
2150
+ "should_decontaminate": false,
2151
+ "metadata": {
2152
+ "version": 0.0
2153
+ }
2154
+ },
2155
+ "mmlu_professional_medicine": {
2156
+ "task": "mmlu_professional_medicine",
2157
+ "task_alias": "professional_medicine",
2158
+ "group": "mmlu_other",
2159
+ "group_alias": "other",
2160
+ "dataset_path": "hails/mmlu_no_train",
2161
+ "dataset_name": "professional_medicine",
2162
+ "test_split": "test",
2163
+ "fewshot_split": "dev",
2164
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
2165
+ "doc_to_target": "answer",
2166
+ "doc_to_choice": [
2167
+ "A",
2168
+ "B",
2169
+ "C",
2170
+ "D"
2171
+ ],
2172
+ "description": "The following are multiple choice questions (with answers) about professional medicine.\n\n",
2173
+ "target_delimiter": " ",
2174
+ "fewshot_delimiter": "\n\n",
2175
+ "fewshot_config": {
2176
+ "sampler": "first_n"
2177
+ },
2178
+ "metric_list": [
2179
+ {
2180
+ "metric": "acc",
2181
+ "aggregation": "mean",
2182
+ "higher_is_better": true
2183
+ }
2184
+ ],
2185
+ "output_type": "multiple_choice",
2186
+ "repeats": 1,
2187
+ "should_decontaminate": false,
2188
+ "metadata": {
2189
+ "version": 0.0
2190
+ }
2191
+ },
2192
+ "mmlu_professional_psychology": {
2193
+ "task": "mmlu_professional_psychology",
2194
+ "task_alias": "professional_psychology",
2195
+ "group": "mmlu_social_sciences",
2196
+ "group_alias": "social_sciences",
2197
+ "dataset_path": "hails/mmlu_no_train",
2198
+ "dataset_name": "professional_psychology",
2199
+ "test_split": "test",
2200
+ "fewshot_split": "dev",
2201
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
2202
+ "doc_to_target": "answer",
2203
+ "doc_to_choice": [
2204
+ "A",
2205
+ "B",
2206
+ "C",
2207
+ "D"
2208
+ ],
2209
+ "description": "The following are multiple choice questions (with answers) about professional psychology.\n\n",
2210
+ "target_delimiter": " ",
2211
+ "fewshot_delimiter": "\n\n",
2212
+ "fewshot_config": {
2213
+ "sampler": "first_n"
2214
+ },
2215
+ "metric_list": [
2216
+ {
2217
+ "metric": "acc",
2218
+ "aggregation": "mean",
2219
+ "higher_is_better": true
2220
+ }
2221
+ ],
2222
+ "output_type": "multiple_choice",
2223
+ "repeats": 1,
2224
+ "should_decontaminate": false,
2225
+ "metadata": {
2226
+ "version": 0.0
2227
+ }
2228
+ },
2229
+ "mmlu_public_relations": {
2230
+ "task": "mmlu_public_relations",
2231
+ "task_alias": "public_relations",
2232
+ "group": "mmlu_social_sciences",
2233
+ "group_alias": "social_sciences",
2234
+ "dataset_path": "hails/mmlu_no_train",
2235
+ "dataset_name": "public_relations",
2236
+ "test_split": "test",
2237
+ "fewshot_split": "dev",
2238
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
2239
+ "doc_to_target": "answer",
2240
+ "doc_to_choice": [
2241
+ "A",
2242
+ "B",
2243
+ "C",
2244
+ "D"
2245
+ ],
2246
+ "description": "The following are multiple choice questions (with answers) about public relations.\n\n",
2247
+ "target_delimiter": " ",
2248
+ "fewshot_delimiter": "\n\n",
2249
+ "fewshot_config": {
2250
+ "sampler": "first_n"
2251
+ },
2252
+ "metric_list": [
2253
+ {
2254
+ "metric": "acc",
2255
+ "aggregation": "mean",
2256
+ "higher_is_better": true
2257
+ }
2258
+ ],
2259
+ "output_type": "multiple_choice",
2260
+ "repeats": 1,
2261
+ "should_decontaminate": false,
2262
+ "metadata": {
2263
+ "version": 0.0
2264
+ }
2265
+ },
2266
+ "mmlu_security_studies": {
2267
+ "task": "mmlu_security_studies",
2268
+ "task_alias": "security_studies",
2269
+ "group": "mmlu_social_sciences",
2270
+ "group_alias": "social_sciences",
2271
+ "dataset_path": "hails/mmlu_no_train",
2272
+ "dataset_name": "security_studies",
2273
+ "test_split": "test",
2274
+ "fewshot_split": "dev",
2275
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
2276
+ "doc_to_target": "answer",
2277
+ "doc_to_choice": [
2278
+ "A",
2279
+ "B",
2280
+ "C",
2281
+ "D"
2282
+ ],
2283
+ "description": "The following are multiple choice questions (with answers) about security studies.\n\n",
2284
+ "target_delimiter": " ",
2285
+ "fewshot_delimiter": "\n\n",
2286
+ "fewshot_config": {
2287
+ "sampler": "first_n"
2288
+ },
2289
+ "metric_list": [
2290
+ {
2291
+ "metric": "acc",
2292
+ "aggregation": "mean",
2293
+ "higher_is_better": true
2294
+ }
2295
+ ],
2296
+ "output_type": "multiple_choice",
2297
+ "repeats": 1,
2298
+ "should_decontaminate": false,
2299
+ "metadata": {
2300
+ "version": 0.0
2301
+ }
2302
+ },
2303
+ "mmlu_sociology": {
2304
+ "task": "mmlu_sociology",
2305
+ "task_alias": "sociology",
2306
+ "group": "mmlu_social_sciences",
2307
+ "group_alias": "social_sciences",
2308
+ "dataset_path": "hails/mmlu_no_train",
2309
+ "dataset_name": "sociology",
2310
+ "test_split": "test",
2311
+ "fewshot_split": "dev",
2312
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
2313
+ "doc_to_target": "answer",
2314
+ "doc_to_choice": [
2315
+ "A",
2316
+ "B",
2317
+ "C",
2318
+ "D"
2319
+ ],
2320
+ "description": "The following are multiple choice questions (with answers) about sociology.\n\n",
2321
+ "target_delimiter": " ",
2322
+ "fewshot_delimiter": "\n\n",
2323
+ "fewshot_config": {
2324
+ "sampler": "first_n"
2325
+ },
2326
+ "metric_list": [
2327
+ {
2328
+ "metric": "acc",
2329
+ "aggregation": "mean",
2330
+ "higher_is_better": true
2331
+ }
2332
+ ],
2333
+ "output_type": "multiple_choice",
2334
+ "repeats": 1,
2335
+ "should_decontaminate": false,
2336
+ "metadata": {
2337
+ "version": 0.0
2338
+ }
2339
+ },
2340
+ "mmlu_us_foreign_policy": {
2341
+ "task": "mmlu_us_foreign_policy",
2342
+ "task_alias": "us_foreign_policy",
2343
+ "group": "mmlu_social_sciences",
2344
+ "group_alias": "social_sciences",
2345
+ "dataset_path": "hails/mmlu_no_train",
2346
+ "dataset_name": "us_foreign_policy",
2347
+ "test_split": "test",
2348
+ "fewshot_split": "dev",
2349
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
2350
+ "doc_to_target": "answer",
2351
+ "doc_to_choice": [
2352
+ "A",
2353
+ "B",
2354
+ "C",
2355
+ "D"
2356
+ ],
2357
+ "description": "The following are multiple choice questions (with answers) about us foreign policy.\n\n",
2358
+ "target_delimiter": " ",
2359
+ "fewshot_delimiter": "\n\n",
2360
+ "fewshot_config": {
2361
+ "sampler": "first_n"
2362
+ },
2363
+ "metric_list": [
2364
+ {
2365
+ "metric": "acc",
2366
+ "aggregation": "mean",
2367
+ "higher_is_better": true
2368
+ }
2369
+ ],
2370
+ "output_type": "multiple_choice",
2371
+ "repeats": 1,
2372
+ "should_decontaminate": false,
2373
+ "metadata": {
2374
+ "version": 0.0
2375
+ }
2376
+ },
2377
+ "mmlu_virology": {
2378
+ "task": "mmlu_virology",
2379
+ "task_alias": "virology",
2380
+ "group": "mmlu_other",
2381
+ "group_alias": "other",
2382
+ "dataset_path": "hails/mmlu_no_train",
2383
+ "dataset_name": "virology",
2384
+ "test_split": "test",
2385
+ "fewshot_split": "dev",
2386
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
2387
+ "doc_to_target": "answer",
2388
+ "doc_to_choice": [
2389
+ "A",
2390
+ "B",
2391
+ "C",
2392
+ "D"
2393
+ ],
2394
+ "description": "The following are multiple choice questions (with answers) about virology.\n\n",
2395
+ "target_delimiter": " ",
2396
+ "fewshot_delimiter": "\n\n",
2397
+ "fewshot_config": {
2398
+ "sampler": "first_n"
2399
+ },
2400
+ "metric_list": [
2401
+ {
2402
+ "metric": "acc",
2403
+ "aggregation": "mean",
2404
+ "higher_is_better": true
2405
+ }
2406
+ ],
2407
+ "output_type": "multiple_choice",
2408
+ "repeats": 1,
2409
+ "should_decontaminate": false,
2410
+ "metadata": {
2411
+ "version": 0.0
2412
+ }
2413
+ },
2414
+ "mmlu_world_religions": {
2415
+ "task": "mmlu_world_religions",
2416
+ "task_alias": "world_religions",
2417
+ "group": "mmlu_humanities",
2418
+ "group_alias": "humanities",
2419
+ "dataset_path": "hails/mmlu_no_train",
2420
+ "dataset_name": "world_religions",
2421
+ "test_split": "test",
2422
+ "fewshot_split": "dev",
2423
+ "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:",
2424
+ "doc_to_target": "answer",
2425
+ "doc_to_choice": [
2426
+ "A",
2427
+ "B",
2428
+ "C",
2429
+ "D"
2430
+ ],
2431
+ "description": "The following are multiple choice questions (with answers) about world religions.\n\n",
2432
+ "target_delimiter": " ",
2433
+ "fewshot_delimiter": "\n\n",
2434
+ "fewshot_config": {
2435
+ "sampler": "first_n"
2436
+ },
2437
+ "metric_list": [
2438
+ {
2439
+ "metric": "acc",
2440
+ "aggregation": "mean",
2441
+ "higher_is_better": true
2442
+ }
2443
+ ],
2444
+ "output_type": "multiple_choice",
2445
+ "repeats": 1,
2446
+ "should_decontaminate": false,
2447
+ "metadata": {
2448
+ "version": 0.0
2449
+ }
2450
+ }
2451
+ },
2452
+ "versions": {
2453
+ "mmlu": "N/A",
2454
+ "mmlu_abstract_algebra": 0.0,
2455
+ "mmlu_anatomy": 0.0,
2456
+ "mmlu_astronomy": 0.0,
2457
+ "mmlu_business_ethics": 0.0,
2458
+ "mmlu_clinical_knowledge": 0.0,
2459
+ "mmlu_college_biology": 0.0,
2460
+ "mmlu_college_chemistry": 0.0,
2461
+ "mmlu_college_computer_science": 0.0,
2462
+ "mmlu_college_mathematics": 0.0,
2463
+ "mmlu_college_medicine": 0.0,
2464
+ "mmlu_college_physics": 0.0,
2465
+ "mmlu_computer_security": 0.0,
2466
+ "mmlu_conceptual_physics": 0.0,
2467
+ "mmlu_econometrics": 0.0,
2468
+ "mmlu_electrical_engineering": 0.0,
2469
+ "mmlu_elementary_mathematics": 0.0,
2470
+ "mmlu_formal_logic": 0.0,
2471
+ "mmlu_global_facts": 0.0,
2472
+ "mmlu_high_school_biology": 0.0,
2473
+ "mmlu_high_school_chemistry": 0.0,
2474
+ "mmlu_high_school_computer_science": 0.0,
2475
+ "mmlu_high_school_european_history": 0.0,
2476
+ "mmlu_high_school_geography": 0.0,
2477
+ "mmlu_high_school_government_and_politics": 0.0,
2478
+ "mmlu_high_school_macroeconomics": 0.0,
2479
+ "mmlu_high_school_mathematics": 0.0,
2480
+ "mmlu_high_school_microeconomics": 0.0,
2481
+ "mmlu_high_school_physics": 0.0,
2482
+ "mmlu_high_school_psychology": 0.0,
2483
+ "mmlu_high_school_statistics": 0.0,
2484
+ "mmlu_high_school_us_history": 0.0,
2485
+ "mmlu_high_school_world_history": 0.0,
2486
+ "mmlu_human_aging": 0.0,
2487
+ "mmlu_human_sexuality": 0.0,
2488
+ "mmlu_humanities": "N/A",
2489
+ "mmlu_international_law": 0.0,
2490
+ "mmlu_jurisprudence": 0.0,
2491
+ "mmlu_logical_fallacies": 0.0,
2492
+ "mmlu_machine_learning": 0.0,
2493
+ "mmlu_management": 0.0,
2494
+ "mmlu_marketing": 0.0,
2495
+ "mmlu_medical_genetics": 0.0,
2496
+ "mmlu_miscellaneous": 0.0,
2497
+ "mmlu_moral_disputes": 0.0,
2498
+ "mmlu_moral_scenarios": 0.0,
2499
+ "mmlu_nutrition": 0.0,
2500
+ "mmlu_other": "N/A",
2501
+ "mmlu_philosophy": 0.0,
2502
+ "mmlu_prehistory": 0.0,
2503
+ "mmlu_professional_accounting": 0.0,
2504
+ "mmlu_professional_law": 0.0,
2505
+ "mmlu_professional_medicine": 0.0,
2506
+ "mmlu_professional_psychology": 0.0,
2507
+ "mmlu_public_relations": 0.0,
2508
+ "mmlu_security_studies": 0.0,
2509
+ "mmlu_social_sciences": "N/A",
2510
+ "mmlu_sociology": 0.0,
2511
+ "mmlu_stem": "N/A",
2512
+ "mmlu_us_foreign_policy": 0.0,
2513
+ "mmlu_virology": 0.0,
2514
+ "mmlu_world_religions": 0.0
2515
+ },
2516
+ "n-shot": {
2517
+ "mmlu": 0,
2518
+ "mmlu_abstract_algebra": 0,
2519
+ "mmlu_anatomy": 0,
2520
+ "mmlu_astronomy": 0,
2521
+ "mmlu_business_ethics": 0,
2522
+ "mmlu_clinical_knowledge": 0,
2523
+ "mmlu_college_biology": 0,
2524
+ "mmlu_college_chemistry": 0,
2525
+ "mmlu_college_computer_science": 0,
2526
+ "mmlu_college_mathematics": 0,
2527
+ "mmlu_college_medicine": 0,
2528
+ "mmlu_college_physics": 0,
2529
+ "mmlu_computer_security": 0,
2530
+ "mmlu_conceptual_physics": 0,
2531
+ "mmlu_econometrics": 0,
2532
+ "mmlu_electrical_engineering": 0,
2533
+ "mmlu_elementary_mathematics": 0,
2534
+ "mmlu_formal_logic": 0,
2535
+ "mmlu_global_facts": 0,
2536
+ "mmlu_high_school_biology": 0,
2537
+ "mmlu_high_school_chemistry": 0,
2538
+ "mmlu_high_school_computer_science": 0,
2539
+ "mmlu_high_school_european_history": 0,
2540
+ "mmlu_high_school_geography": 0,
2541
+ "mmlu_high_school_government_and_politics": 0,
2542
+ "mmlu_high_school_macroeconomics": 0,
2543
+ "mmlu_high_school_mathematics": 0,
2544
+ "mmlu_high_school_microeconomics": 0,
2545
+ "mmlu_high_school_physics": 0,
2546
+ "mmlu_high_school_psychology": 0,
2547
+ "mmlu_high_school_statistics": 0,
2548
+ "mmlu_high_school_us_history": 0,
2549
+ "mmlu_high_school_world_history": 0,
2550
+ "mmlu_human_aging": 0,
2551
+ "mmlu_human_sexuality": 0,
2552
+ "mmlu_humanities": 0,
2553
+ "mmlu_international_law": 0,
2554
+ "mmlu_jurisprudence": 0,
2555
+ "mmlu_logical_fallacies": 0,
2556
+ "mmlu_machine_learning": 0,
2557
+ "mmlu_management": 0,
2558
+ "mmlu_marketing": 0,
2559
+ "mmlu_medical_genetics": 0,
2560
+ "mmlu_miscellaneous": 0,
2561
+ "mmlu_moral_disputes": 0,
2562
+ "mmlu_moral_scenarios": 0,
2563
+ "mmlu_nutrition": 0,
2564
+ "mmlu_other": 0,
2565
+ "mmlu_philosophy": 0,
2566
+ "mmlu_prehistory": 0,
2567
+ "mmlu_professional_accounting": 0,
2568
+ "mmlu_professional_law": 0,
2569
+ "mmlu_professional_medicine": 0,
2570
+ "mmlu_professional_psychology": 0,
2571
+ "mmlu_public_relations": 0,
2572
+ "mmlu_security_studies": 0,
2573
+ "mmlu_social_sciences": 0,
2574
+ "mmlu_sociology": 0,
2575
+ "mmlu_stem": 0,
2576
+ "mmlu_us_foreign_policy": 0,
2577
+ "mmlu_virology": 0,
2578
+ "mmlu_world_religions": 0
2579
+ },
2580
+ "config": {
2581
+ "model": "hf",
2582
+ "model_args": "pretrained=SmerkyG/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True",
2583
+ "batch_size": "auto",
2584
+ "batch_sizes": [
2585
+ 32
2586
+ ],
2587
+ "device": null,
2588
+ "use_cache": null,
2589
+ "limit": null,
2590
+ "bootstrap_iters": 100000,
2591
+ "gen_kwargs": null
2592
+ },
2593
+ "git_hash": "1ee41f7"
2594
+ }
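Every mmlu_* subtask above shares one Jinja2 "doc_to_text" template. As a quick illustration only (not part of the committed results; the example record is hypothetical and jinja2 is assumed to be installed), this is how that template renders a document into a prompt:

from jinja2 import Template

# Template string copied verbatim from the "doc_to_text" field above.
mmlu_doc_to_text = Template(
    "{{question.strip()}}\n"
    "A. {{choices[0]}}\nB. {{choices[1]}}\n"
    "C. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:"
)

# Hypothetical record in the hails/mmlu_no_train schema.
doc = {
    "question": "Which factor shifts a demand curve to the right?",
    "choices": [
        "A binding price ceiling",
        "An increase in consumer income",
        "A new sales tax",
        "Lower production costs",
    ],
    "answer": 1,  # "doc_to_target" indexes into doc_to_choice ["A", "B", "C", "D"]
}

print(mmlu_doc_to_text.render(**doc))
# The gold completion is doc_to_choice[doc["answer"]] ("B" here), appended
# after the single-space "target_delimiter" when the harness scores choices.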
lm-eval-output/SmerkyG/rwkv-5-world-1b5/mmlu/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:287411341235a46519e378cb226b7d278648aa9c1ae2f258b4fc3ed273fbfe05
+ size 122208
lm-eval-output/SmerkyG/rwkv-5-world-1b5/openbookqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
@@ -0,0 +1,66 @@
+ {
+ "results": {
+ "openbookqa": {
+ "acc,none": 0.254,
+ "acc_stderr,none": 0.01948659680164338,
+ "acc_norm,none": 0.36,
+ "acc_norm_stderr,none": 0.021487751089720522,
+ "alias": "openbookqa"
+ }
+ },
+ "configs": {
+ "openbookqa": {
+ "task": "openbookqa",
+ "dataset_path": "openbookqa",
+ "dataset_name": "main",
+ "training_split": "train",
+ "validation_split": "validation",
+ "test_split": "test",
+ "doc_to_text": "question_stem",
+ "doc_to_target": "{{choices.label.index(answerKey.lstrip())}}",
+ "doc_to_choice": "{{choices.text}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ },
+ {
+ "metric": "acc_norm",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "question_stem",
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "openbookqa": 1.0
+ },
+ "n-shot": {
+ "openbookqa": 0
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=SmerkyG/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True",
+ "batch_size": "auto",
+ "batch_sizes": [
+ 64
+ ],
+ "device": null,
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null
+ },
+ "git_hash": "1ee41f7"
+ }
lm-eval-output/SmerkyG/rwkv-5-world-1b5/openbookqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1dc0559bb19333d7600ca5fd2d783ae4a475351c97f1ceb5639c9e239f890941
+ size 36955
lm-eval-output/SmerkyG/rwkv-5-world-1b5/pawsx/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
@@ -0,0 +1,283 @@
+ {
+ "results": {
+ "pawsx": {
+ "acc,none": 0.5192857142857144,
+ "acc_stderr,none": 0.029939594331147804,
+ "alias": "pawsx"
+ },
+ "paws_de": {
+ "acc,none": 0.4845,
+ "acc_stderr,none": 0.011177761232603322,
+ "alias": " - paws_de"
+ },
+ "paws_en": {
+ "acc,none": 0.456,
+ "acc_stderr,none": 0.011139750761283311,
+ "alias": " - paws_en"
+ },
+ "paws_es": {
+ "acc,none": 0.533,
+ "acc_stderr,none": 0.011158752568250675,
+ "alias": " - paws_es"
+ },
+ "paws_fr": {
+ "acc,none": 0.5485,
+ "acc_stderr,none": 0.011130400617630765,
+ "alias": " - paws_fr"
+ },
+ "paws_ja": {
+ "acc,none": 0.557,
+ "acc_stderr,none": 0.011110230358066709,
+ "alias": " - paws_ja"
+ },
+ "paws_ko": {
+ "acc,none": 0.52,
+ "acc_stderr,none": 0.011174185930778305,
+ "alias": " - paws_ko"
+ },
+ "paws_zh": {
+ "acc,none": 0.536,
+ "acc_stderr,none": 0.011154111668060216,
+ "alias": " - paws_zh"
+ }
+ },
+ "groups": {
+ "pawsx": {
+ "acc,none": 0.5192857142857144,
+ "acc_stderr,none": 0.029939594331147804,
+ "alias": "pawsx"
+ }
+ },
+ "configs": {
+ "paws_de": {
+ "task": "paws_de",
+ "group": "pawsx",
+ "dataset_path": "paws-x",
+ "dataset_name": "de",
+ "training_split": "train",
+ "validation_split": "validation",
+ "test_split": "test",
+ "doc_to_text": "",
+ "doc_to_target": "label",
+ "doc_to_choice": "{{[sentence1+\", richtig? Ja, \"+sentence2, sentence1+\", richtig? Nein, \"+sentence2]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "paws_en": {
+ "task": "paws_en",
+ "group": "pawsx",
+ "dataset_path": "paws-x",
+ "dataset_name": "en",
+ "training_split": "train",
+ "validation_split": "validation",
+ "test_split": "test",
+ "doc_to_text": "",
+ "doc_to_target": "label",
+ "doc_to_choice": "{{[sentence1+\", right? Yes, \"+sentence2, sentence1+\", right? No, \"+sentence2]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "paws_es": {
+ "task": "paws_es",
+ "group": "pawsx",
+ "dataset_path": "paws-x",
+ "dataset_name": "es",
+ "training_split": "train",
+ "validation_split": "validation",
+ "test_split": "test",
+ "doc_to_text": "",
+ "doc_to_target": "label",
+ "doc_to_choice": "{{[sentence1+\", verdad? Sí, \"+sentence2, sentence1+\", verdad? No, \"+sentence2]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "paws_fr": {
+ "task": "paws_fr",
+ "group": "pawsx",
+ "dataset_path": "paws-x",
+ "dataset_name": "fr",
+ "training_split": "train",
+ "validation_split": "validation",
+ "test_split": "test",
+ "doc_to_text": "",
+ "doc_to_target": "label",
+ "doc_to_choice": "{{[sentence1+\", n'est-ce pas? Oui, \"+sentence2, sentence1+\", n'est-ce pas? No, \"+sentence2]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "paws_ja": {
+ "task": "paws_ja",
+ "group": "pawsx",
+ "dataset_path": "paws-x",
+ "dataset_name": "ja",
+ "training_split": "train",
+ "validation_split": "validation",
+ "test_split": "test",
+ "doc_to_text": "",
+ "doc_to_target": "label",
+ "doc_to_choice": "{{[sentence1+\", ですね? はい, \"+sentence2, sentence1+\", ですね? いいえ, \"+sentence2]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "paws_ko": {
+ "task": "paws_ko",
+ "group": "pawsx",
+ "dataset_path": "paws-x",
+ "dataset_name": "ko",
+ "training_split": "train",
+ "validation_split": "validation",
+ "test_split": "test",
+ "doc_to_text": "",
+ "doc_to_target": "label",
+ "doc_to_choice": "{{[sentence1+\", 맞죠? 예, \"+sentence2, sentence1+\", 맞죠? 아니요, \"+sentence2]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ },
+ "paws_zh": {
+ "task": "paws_zh",
+ "group": "pawsx",
+ "dataset_path": "paws-x",
+ "dataset_name": "zh",
+ "training_split": "train",
+ "validation_split": "validation",
+ "test_split": "test",
+ "doc_to_text": "",
+ "doc_to_target": "label",
+ "doc_to_choice": "{{[sentence1+\", 对吧? 是, \"+sentence2, sentence1+\", 对吧? 不是, \"+sentence2]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 0.0
+ }
+ }
+ },
+ "versions": {
+ "paws_de": 0.0,
+ "paws_en": 0.0,
+ "paws_es": 0.0,
+ "paws_fr": 0.0,
+ "paws_ja": 0.0,
+ "paws_ko": 0.0,
+ "paws_zh": 0.0,
+ "pawsx": "N/A"
+ },
+ "n-shot": {
+ "paws_de": 0,
+ "paws_en": 0,
+ "paws_es": 0,
+ "paws_fr": 0,
+ "paws_ja": 0,
+ "paws_ko": 0,
+ "paws_zh": 0,
+ "pawsx": 0
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=SmerkyG/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True",
+ "batch_size": "auto",
+ "batch_sizes": [
+ 64
+ ],
+ "device": null,
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null
+ },
+ "git_hash": "1ee41f7"
+ }
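Since "doc_to_text" is empty for every paws_* task above, each "doc_to_choice" template expands to two complete candidate sequences, and acc records whether the higher-likelihood candidate's index matches "label". A minimal sketch of the English template (hypothetical example record; not part of the committed files):

def paws_en_choices(doc):
    # Mirrors the paws_en "doc_to_choice" template above:
    # sentence1 + ", right? Yes, " + sentence2 versus the "No, " variant.
    return [
        doc["sentence1"] + ", right? Yes, " + doc["sentence2"],
        doc["sentence1"] + ", right? No, " + doc["sentence2"],
    ]

doc = {
    "sentence1": "The film was released in 1999.",
    "sentence2": "The movie came out in 1999.",
    "label": 0,  # hypothetical: index of the gold continuation above
}
assert paws_en_choices(doc)[doc["label"]].startswith("The film")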
lm-eval-output/SmerkyG/rwkv-5-world-1b5/pawsx/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9c5d9698f108e9c4a08386a4c539ee310c0cf8f16ec27e15a1640e297fde8df5
+ size 60320
lm-eval-output/SmerkyG/rwkv-5-world-1b5/piqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
@@ -0,0 +1,64 @@
+ {
+ "results": {
+ "piqa": {
+ "acc,none": 0.7110990206746464,
+ "acc_stderr,none": 0.010575111841364906,
+ "acc_norm,none": 0.7138193688792165,
+ "acc_norm_stderr,none": 0.010545318576106643,
+ "alias": "piqa"
+ }
+ },
+ "configs": {
+ "piqa": {
+ "task": "piqa",
+ "dataset_path": "piqa",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "Question: {{goal}}\nAnswer:",
+ "doc_to_target": "label",
+ "doc_to_choice": "{{[sol1, sol2]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ },
+ {
+ "metric": "acc_norm",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "goal",
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "piqa": 1.0
+ },
+ "n-shot": {
+ "piqa": 0
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=SmerkyG/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True",
+ "batch_size": "auto",
+ "batch_sizes": [
+ 64
+ ],
+ "device": null,
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null
+ },
+ "git_hash": "1ee41f7"
+ }
lm-eval-output/SmerkyG/rwkv-5-world-1b5/piqa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9cf717c165ed5b4ffa6a2c2390c31bf01b678ea3eea2586b1cf8095a63c329ca
+ size 37012
lm-eval-output/SmerkyG/rwkv-5-world-1b5/pythia/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
The diff for this file is too large to render. See raw diff
 
lm-eval-output/SmerkyG/rwkv-5-world-1b5/pythia/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f867471b2e1ce25f12ee05cc4685e1d02cb1dd75ddcae5cd0ca561f8715d5748
+ size 463796
lm-eval-output/SmerkyG/rwkv-5-world-1b5/record/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
@@ -0,0 +1,67 @@
+ {
+ "results": {
+ "record": {
+ "f1,none": 0.26163523828089236,
+ "f1_stderr,none": 0.004364439540718011,
+ "em,none": 0.254,
+ "em_stderr,none": 0.004353193658626019,
+ "alias": "record"
+ }
+ },
+ "configs": {
+ "record": {
+ "task": "record",
+ "group": [
+ "super-glue-lm-eval-v1"
+ ],
+ "dataset_path": "super_glue",
+ "dataset_name": "record",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "def doc_to_text(doc):\n initial_text, *highlights = doc[\"passage\"].strip().split(\"\\n@highlight\\n\")\n text = initial_text + \"\\n\\n\"\n for highlight in highlights:\n text += f\" - {highlight}.\\n\"\n return text\n",
+ "doc_to_target": "{{answers}}",
+ "doc_to_choice": "{{entities}}",
+ "process_results": "def process_results(doc, results):\n # ReCoRD's evaluation is actually deceptively simple:\n # - Pick the maximum likelihood prediction entity\n # - Evaluate the accuracy and token F1 PER EXAMPLE\n # - Average over all examples\n max_idx = np.argmax(np.array([result[0] for result in results]))\n\n prediction = doc[\"entities\"][max_idx]\n gold_label_set = doc[\"answers\"]\n f1 = metric_max_over_ground_truths(\n squad_metrics.compute_f1, prediction, gold_label_set\n )\n em = metric_max_over_ground_truths(\n squad_metrics.compute_exact, prediction, gold_label_set\n )\n\n return {\n \"f1\": f1,\n \"em\": em,\n }\n",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "f1",
+ "aggregation": "mean"
+ },
+ {
+ "metric": "em",
+ "higher_is_better": true,
+ "aggregation": "mean"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "record": 1.0
+ },
+ "n-shot": {
+ "record": 0
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=SmerkyG/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True",
+ "batch_size": "auto",
+ "batch_sizes": [
+ 32
+ ],
+ "device": null,
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null
+ },
+ "git_hash": "1ee41f7"
+ }
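The "process_results" source embedded above is the entire ReCoRD scoring rule; a condensed sketch of the same flow (numpy assumed; the f1/em helpers referenced in the config are the harness/transformers utilities, not redefined here):

import numpy as np

def pick_record_prediction(doc, results):
    # results holds one (loglikelihood, is_greedy) pair per candidate entity;
    # the predicted entity is simply the most likely candidate. Token F1 and
    # exact match are then computed per example against doc["answers"] and
    # averaged over all examples.
    max_idx = np.argmax(np.array([loglik for loglik, _ in results]))
    return doc["entities"][max_idx]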
lm-eval-output/SmerkyG/rwkv-5-world-1b5/record/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:59c4287671b2933f98fdef86dd3bb0d0138e9b3ff34bc78708047d70497620c3
+ size 66555
lm-eval-output/SmerkyG/rwkv-5-world-1b5/sciq/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
@@ -0,0 +1,65 @@
+ {
+ "results": {
+ "sciq": {
+ "acc,none": 0.899,
+ "acc_stderr,none": 0.009533618929340983,
+ "acc_norm,none": 0.853,
+ "acc_norm_stderr,none": 0.011203415395160335,
+ "alias": "sciq"
+ }
+ },
+ "configs": {
+ "sciq": {
+ "task": "sciq",
+ "dataset_path": "sciq",
+ "training_split": "train",
+ "validation_split": "validation",
+ "test_split": "test",
+ "doc_to_text": "{{support.lstrip()}}\nQuestion: {{question}}\nAnswer:",
+ "doc_to_target": 3,
+ "doc_to_choice": "{{[distractor1, distractor2, distractor3, correct_answer]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ },
+ {
+ "metric": "acc_norm",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{support}} {{question}}",
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "sciq": 1.0
+ },
+ "n-shot": {
+ "sciq": 0
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=SmerkyG/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True",
+ "batch_size": "auto",
+ "batch_sizes": [
+ 64
+ ],
+ "device": null,
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null
+ },
+ "git_hash": "1ee41f7"
+ }
lm-eval-output/SmerkyG/rwkv-5-world-1b5/sciq/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:645ee047914143613682070fde4ab80b202381552292a4937a2aaa9963b8e56f
+ size 45089
lm-eval-output/SmerkyG/rwkv-5-world-1b5/winogrande/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
@@ -0,0 +1,58 @@
+ {
+ "results": {
+ "winogrande": {
+ "acc,none": 0.5935280189423836,
+ "acc_stderr,none": 0.013804448697753378,
+ "alias": "winogrande"
+ }
+ },
+ "configs": {
+ "winogrande": {
+ "task": "winogrande",
+ "dataset_path": "winogrande",
+ "dataset_name": "winogrande_xl",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "def doc_to_text(doc):\n answer_to_num = {\"1\": 0, \"2\": 1}\n return answer_to_num[doc[\"answer\"]]\n",
+ "doc_to_target": "def doc_to_target(doc):\n idx = doc[\"sentence\"].index(\"_\") + 1\n return doc[\"sentence\"][idx:].strip()\n",
+ "doc_to_choice": "def doc_to_choice(doc):\n idx = doc[\"sentence\"].index(\"_\")\n options = [doc[\"option1\"], doc[\"option2\"]]\n return [doc[\"sentence\"][:idx] + opt for opt in options]\n",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "sentence",
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "winogrande": 1.0
+ },
+ "n-shot": {
+ "winogrande": 0
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=SmerkyG/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True",
+ "batch_size": "auto",
+ "batch_sizes": [
+ 64
+ ],
+ "device": null,
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null
+ },
+ "git_hash": "1ee41f7"
+ }
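The three winogrande functions above implement the usual partial-scoring trick: each choice is the sentence up to the blank with one option substituted, and the shared target is everything after the blank. Run against a hypothetical record (sketch only; the two functions are copied from the config):

def doc_to_choice(doc):
    idx = doc["sentence"].index("_")
    options = [doc["option1"], doc["option2"]]
    return [doc["sentence"][:idx] + opt for opt in options]

def doc_to_target(doc):
    idx = doc["sentence"].index("_") + 1
    return doc["sentence"][idx:].strip()

doc = {  # hypothetical record in the winogrande_xl schema
    "sentence": "The trophy didn't fit in the suitcase because _ was too big.",
    "option1": "the trophy",
    "option2": "the suitcase",
    "answer": "1",  # doc_to_text maps "1"/"2" to choice index 0/1
}
print(doc_to_choice(doc))  # two full prefixes, one per option
print(doc_to_target(doc))  # "was too big."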
lm-eval-output/SmerkyG/rwkv-5-world-1b5/winogrande/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d134dd27739241cc1f7433415c1a7eecad07fae73514e0108e8948179a432203
+ size 37250
lm-eval-output/SmerkyG/rwkv-5-world-1b5/xcopa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
@@ -0,0 +1,390 @@
+ {
+ "results": {
+ "xcopa": {
+ "acc,none": 0.5787272727272728,
+ "acc_stderr,none": 0.04424725212711732,
+ "alias": "xcopa"
+ },
+ "xcopa_et": {
+ "acc,none": 0.57,
+ "acc_stderr,none": 0.02216263442665284,
+ "alias": " - xcopa_et"
+ },
+ "xcopa_ht": {
+ "acc,none": 0.508,
+ "acc_stderr,none": 0.022380208834928028,
+ "alias": " - xcopa_ht"
+ },
+ "xcopa_id": {
+ "acc,none": 0.636,
+ "acc_stderr,none": 0.021539170637317688,
+ "alias": " - xcopa_id"
+ },
+ "xcopa_it": {
+ "acc,none": 0.638,
+ "acc_stderr,none": 0.021513662527582404,
+ "alias": " - xcopa_it"
+ },
+ "xcopa_qu": {
+ "acc,none": 0.518,
+ "acc_stderr,none": 0.02236856511738799,
+ "alias": " - xcopa_qu"
+ },
+ "xcopa_sw": {
+ "acc,none": 0.562,
+ "acc_stderr,none": 0.022210326363977417,
+ "alias": " - xcopa_sw"
+ },
+ "xcopa_ta": {
+ "acc,none": 0.544,
+ "acc_stderr,none": 0.022296238348407056,
+ "alias": " - xcopa_ta"
+ },
+ "xcopa_th": {
+ "acc,none": 0.566,
+ "acc_stderr,none": 0.022187215803029008,
+ "alias": " - xcopa_th"
+ },
+ "xcopa_tr": {
+ "acc,none": 0.56,
+ "acc_stderr,none": 0.022221331534143036,
+ "alias": " - xcopa_tr"
+ },
+ "xcopa_vi": {
+ "acc,none": 0.612,
+ "acc_stderr,none": 0.02181430098478764,
+ "alias": " - xcopa_vi"
+ },
+ "xcopa_zh": {
+ "acc,none": 0.652,
+ "acc_stderr,none": 0.0213237286328075,
+ "alias": " - xcopa_zh"
+ }
+ },
+ "groups": {
+ "xcopa": {
+ "acc,none": 0.5787272727272728,
+ "acc_stderr,none": 0.04424725212711732,
+ "alias": "xcopa"
+ }
+ },
+ "configs": {
+ "xcopa_et": {
+ "task": "xcopa_et",
+ "group": "xcopa",
+ "dataset_path": "xcopa",
+ "dataset_name": "et",
+ "validation_split": "validation",
+ "test_split": "test",
+ "doc_to_text": "functools.partial(<function doc_to_text at 0x7f87a4dd9d00>, connector={'cause': 'sest', 'effect': 'seetõttu'})",
+ "doc_to_target": "label",
+ "doc_to_choice": "def doc_to_choice(doc):\n return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xcopa_ht": {
+ "task": "xcopa_ht",
+ "group": "xcopa",
+ "dataset_path": "xcopa",
+ "dataset_name": "ht",
+ "validation_split": "validation",
+ "test_split": "test",
+ "doc_to_text": "functools.partial(<function doc_to_text at 0x7f87a7d3f7e0>, connector={'cause': 'poukisa', 'effect': 'donk sa'})",
+ "doc_to_target": "label",
+ "doc_to_choice": "def doc_to_choice(doc):\n return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xcopa_id": {
+ "task": "xcopa_id",
+ "group": "xcopa",
+ "dataset_path": "xcopa",
+ "dataset_name": "id",
+ "validation_split": "validation",
+ "test_split": "test",
+ "doc_to_text": "functools.partial(<function doc_to_text at 0x7f87a7d3d4e0>, connector={'cause': 'karena', 'effect': 'maka'})",
+ "doc_to_target": "label",
+ "doc_to_choice": "def doc_to_choice(doc):\n return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xcopa_it": {
+ "task": "xcopa_it",
+ "group": "xcopa",
+ "dataset_path": "xcopa",
+ "dataset_name": "it",
+ "validation_split": "validation",
+ "test_split": "test",
+ "doc_to_text": "functools.partial(<function doc_to_text at 0x7f87a7d132e0>, connector={'cause': 'perché', 'effect': 'quindi'})",
+ "doc_to_target": "label",
+ "doc_to_choice": "def doc_to_choice(doc):\n return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xcopa_qu": {
+ "task": "xcopa_qu",
+ "group": "xcopa",
+ "dataset_path": "xcopa",
+ "dataset_name": "qu",
+ "validation_split": "validation",
+ "test_split": "test",
+ "doc_to_text": "functools.partial(<function doc_to_text at 0x7f87a7d3eac0>, connector={'cause': 'imataq', 'effect': 'chaymi'})",
+ "doc_to_target": "label",
+ "doc_to_choice": "def doc_to_choice(doc):\n return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xcopa_sw": {
+ "task": "xcopa_sw",
+ "group": "xcopa",
+ "dataset_path": "xcopa",
+ "dataset_name": "sw",
+ "validation_split": "validation",
+ "test_split": "test",
+ "doc_to_text": "functools.partial(<function doc_to_text at 0x7f87a7d12d40>, connector={'cause': 'kwa sababu', 'effect': 'kwa hiyo'})",
+ "doc_to_target": "label",
+ "doc_to_choice": "def doc_to_choice(doc):\n return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xcopa_ta": {
+ "task": "xcopa_ta",
+ "group": "xcopa",
+ "dataset_path": "xcopa",
+ "dataset_name": "ta",
+ "validation_split": "validation",
+ "test_split": "test",
+ "doc_to_text": "functools.partial(<function doc_to_text at 0x7f87c0271300>, connector={'cause': 'காரணமாக', 'effect': 'எனவே'})",
+ "doc_to_target": "label",
+ "doc_to_choice": "def doc_to_choice(doc):\n return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xcopa_th": {
+ "task": "xcopa_th",
+ "group": "xcopa",
+ "dataset_path": "xcopa",
+ "dataset_name": "th",
+ "validation_split": "validation",
+ "test_split": "test",
+ "doc_to_text": "functools.partial(<function doc_to_text at 0x7f87a7d13060>, connector={'cause': 'เพราะ', 'effect': 'ดังนั้น'})",
+ "doc_to_target": "label",
+ "doc_to_choice": "def doc_to_choice(doc):\n return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xcopa_tr": {
+ "task": "xcopa_tr",
+ "group": "xcopa",
+ "dataset_path": "xcopa",
+ "dataset_name": "tr",
+ "validation_split": "validation",
+ "test_split": "test",
+ "doc_to_text": "functools.partial(<function doc_to_text at 0x7f86ebe98b80>, connector={'cause': 'çünkü', 'effect': 'bu yüzden'})",
+ "doc_to_target": "label",
+ "doc_to_choice": "def doc_to_choice(doc):\n return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xcopa_vi": {
+ "task": "xcopa_vi",
+ "group": "xcopa",
+ "dataset_path": "xcopa",
+ "dataset_name": "vi",
+ "validation_split": "validation",
+ "test_split": "test",
+ "doc_to_text": "functools.partial(<function doc_to_text at 0x7f87a7d3c9a0>, connector={'cause': 'bởi vì', 'effect': 'vì vậy'})",
+ "doc_to_target": "label",
+ "doc_to_choice": "def doc_to_choice(doc):\n return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xcopa_zh": {
+ "task": "xcopa_zh",
+ "group": "xcopa",
+ "dataset_path": "xcopa",
+ "dataset_name": "zh",
+ "validation_split": "validation",
+ "test_split": "test",
+ "doc_to_text": "functools.partial(<function doc_to_text at 0x7f87a7d122a0>, connector={'cause': '因为', 'effect': '所以'})",
+ "doc_to_target": "label",
+ "doc_to_choice": "def doc_to_choice(doc):\n return [convert_choice(doc[\"choice1\"]), convert_choice(doc[\"choice2\"])]\n",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "xcopa": "N/A",
+ "xcopa_et": 1.0,
+ "xcopa_ht": 1.0,
+ "xcopa_id": 1.0,
+ "xcopa_it": 1.0,
+ "xcopa_qu": 1.0,
+ "xcopa_sw": 1.0,
+ "xcopa_ta": 1.0,
+ "xcopa_th": 1.0,
+ "xcopa_tr": 1.0,
+ "xcopa_vi": 1.0,
+ "xcopa_zh": 1.0
+ },
+ "n-shot": {
+ "xcopa": 0,
+ "xcopa_et": 0,
+ "xcopa_ht": 0,
+ "xcopa_id": 0,
+ "xcopa_it": 0,
+ "xcopa_qu": 0,
+ "xcopa_sw": 0,
+ "xcopa_ta": 0,
+ "xcopa_th": 0,
+ "xcopa_tr": 0,
+ "xcopa_vi": 0,
+ "xcopa_zh": 0
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=SmerkyG/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True",
+ "batch_size": "auto",
+ "batch_sizes": [
+ 64
+ ],
+ "device": null,
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null
+ },
+ "git_hash": "1ee41f7"
+ }
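The xcopa group score is consistent with simply averaging the eleven per-language subtasks; each XCOPA test split has the same size (500 items), so the pooled accuracy and the plain mean coincide. A quick check against the numbers in this file:

```python
# Per-language accuracies copied from the results block above (et..zh).
per_lang = [0.57, 0.508, 0.636, 0.638, 0.518, 0.562,
            0.544, 0.566, 0.56, 0.612, 0.652]
print(sum(per_lang) / len(per_lang))  # 0.57872727... -> the reported group "acc,none"
```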
lm-eval-output/SmerkyG/rwkv-5-world-1b5/xcopa/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cd8dad5dc3ea2c5d0fd99e6decbe8ee0bf0ad0f41f4f95b6bc45a59173c6506c
+ size 79942
lm-eval-output/SmerkyG/rwkv-5-world-1b5/xnli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
@@ -0,0 +1,548 @@
+ {
+ "results": {
+ "xnli": {
+ "acc,none": 0.4044979919678715,
+ "acc_stderr,none": 0.04620022346504284,
+ "alias": "xnli"
+ },
+ "xnli_ar": {
+ "acc,none": 0.3345381526104418,
+ "acc_stderr,none": 0.009457404390939166,
+ "alias": " - xnli_ar"
+ },
+ "xnli_bg": {
+ "acc,none": 0.42610441767068274,
+ "acc_stderr,none": 0.009912016377459067,
+ "alias": " - xnli_bg"
+ },
+ "xnli_de": {
+ "acc,none": 0.44859437751004017,
+ "acc_stderr,none": 0.009968964736894263,
+ "alias": " - xnli_de"
+ },
+ "xnli_el": {
+ "acc,none": 0.37349397590361444,
+ "acc_stderr,none": 0.00969598596221976,
+ "alias": " - xnli_el"
+ },
+ "xnli_en": {
+ "acc,none": 0.5108433734939759,
+ "acc_stderr,none": 0.010019715824483473,
+ "alias": " - xnli_en"
+ },
+ "xnli_es": {
+ "acc,none": 0.4566265060240964,
+ "acc_stderr,none": 0.009984293410840315,
+ "alias": " - xnli_es"
+ },
+ "xnli_fr": {
+ "acc,none": 0.457429718875502,
+ "acc_stderr,none": 0.009985682220227464,
+ "alias": " - xnli_fr"
+ },
+ "xnli_hi": {
+ "acc,none": 0.3682730923694779,
+ "acc_stderr,none": 0.009668013178998446,
+ "alias": " - xnli_hi"
+ },
+ "xnli_ru": {
+ "acc,none": 0.4493975903614458,
+ "acc_stderr,none": 0.009970615649588139,
+ "alias": " - xnli_ru"
+ },
+ "xnli_sw": {
+ "acc,none": 0.3357429718875502,
+ "acc_stderr,none": 0.009465838617337356,
+ "alias": " - xnli_sw"
+ },
+ "xnli_th": {
+ "acc,none": 0.38473895582329315,
+ "acc_stderr,none": 0.00975214930715253,
+ "alias": " - xnli_th"
+ },
+ "xnli_tr": {
+ "acc,none": 0.39799196787148594,
+ "acc_stderr,none": 0.009811284026425582,
+ "alias": " - xnli_tr"
+ },
+ "xnli_ur": {
+ "acc,none": 0.3506024096385542,
+ "acc_stderr,none": 0.009564237156206098,
+ "alias": " - xnli_ur"
+ },
+ "xnli_vi": {
+ "acc,none": 0.43052208835341366,
+ "acc_stderr,none": 0.009924844537285524,
+ "alias": " - xnli_vi"
+ },
+ "xnli_zh": {
+ "acc,none": 0.342570281124498,
+ "acc_stderr,none": 0.009512333319470373,
+ "alias": " - xnli_zh"
+ }
+ },
+ "groups": {
+ "xnli": {
+ "acc,none": 0.4044979919678715,
+ "acc_stderr,none": 0.04620022346504284,
+ "alias": "xnli"
+ }
+ },
+ "configs": {
+ "xnli_ar": {
+ "task": "xnli_ar",
+ "group": "xnli",
+ "dataset_path": "xnli",
+ "dataset_name": "ar",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "",
+ "doc_to_target": "label",
+ "doc_to_choice": "{{[premise+\", صحيح? نعم, \"+hypothesis,premise+\", صحيح? لذا, \"+hypothesis,premise+\", صحيح? رقم, \"+hypothesis]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xnli_bg": {
+ "task": "xnli_bg",
+ "group": "xnli",
+ "dataset_path": "xnli",
+ "dataset_name": "bg",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "",
+ "doc_to_target": "label",
+ "doc_to_choice": "{{[premise+\", правилно? да, \"+hypothesis,premise+\", правилно? така, \"+hypothesis,premise+\", правилно? не, \"+hypothesis]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xnli_de": {
+ "task": "xnli_de",
+ "group": "xnli",
+ "dataset_path": "xnli",
+ "dataset_name": "de",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "",
+ "doc_to_target": "label",
+ "doc_to_choice": "{{[premise+\", richtig? Ja, \"+hypothesis,premise+\", richtig? Auch, \"+hypothesis,premise+\", richtig? Nein, \"+hypothesis]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xnli_el": {
+ "task": "xnli_el",
+ "group": "xnli",
+ "dataset_path": "xnli",
+ "dataset_name": "el",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "",
+ "doc_to_target": "label",
+ "doc_to_choice": "{{[premise+\", σωστός? Ναί, \"+hypothesis,premise+\", σωστός? Έτσι, \"+hypothesis,premise+\", σωστός? όχι, \"+hypothesis]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xnli_en": {
+ "task": "xnli_en",
+ "group": "xnli",
+ "dataset_path": "xnli",
+ "dataset_name": "en",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "",
+ "doc_to_target": "label",
+ "doc_to_choice": "{{[premise+\", right? Yes, \"+hypothesis,premise+\", right? Also, \"+hypothesis,premise+\", right? No, \"+hypothesis]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xnli_es": {
+ "task": "xnli_es",
+ "group": "xnli",
+ "dataset_path": "xnli",
+ "dataset_name": "es",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "",
+ "doc_to_target": "label",
+ "doc_to_choice": "{{[premise+\", correcto? Sí, \"+hypothesis,premise+\", correcto? Asi que, \"+hypothesis,premise+\", correcto? No, \"+hypothesis]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xnli_fr": {
+ "task": "xnli_fr",
+ "group": "xnli",
+ "dataset_path": "xnli",
+ "dataset_name": "fr",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "",
+ "doc_to_target": "label",
+ "doc_to_choice": "{{[premise+\", correct? Oui, \"+hypothesis,premise+\", correct? Aussi, \"+hypothesis,premise+\", correct? Non, \"+hypothesis]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xnli_hi": {
+ "task": "xnli_hi",
+ "group": "xnli",
+ "dataset_path": "xnli",
+ "dataset_name": "hi",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "",
+ "doc_to_target": "label",
+ "doc_to_choice": "{{[premise+\", सही? हाँ, \"+hypothesis,premise+\", सही? इसलिए, \"+hypothesis,premise+\", सही? नहीं, \"+hypothesis]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xnli_ru": {
+ "task": "xnli_ru",
+ "group": "xnli",
+ "dataset_path": "xnli",
+ "dataset_name": "ru",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "",
+ "doc_to_target": "label",
+ "doc_to_choice": "{{[premise+\", правильно? Да, \"+hypothesis,premise+\", правильно? Так, \"+hypothesis,premise+\", правильно? Нет, \"+hypothesis]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xnli_sw": {
+ "task": "xnli_sw",
+ "group": "xnli",
+ "dataset_path": "xnli",
+ "dataset_name": "sw",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "",
+ "doc_to_target": "label",
+ "doc_to_choice": "{{[premise+\", sahihi? Ndiyo, \"+hypothesis,premise+\", sahihi? Hivyo, \"+hypothesis,premise+\", sahihi? Hapana, \"+hypothesis]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xnli_th": {
+ "task": "xnli_th",
+ "group": "xnli",
+ "dataset_path": "xnli",
+ "dataset_name": "th",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "",
+ "doc_to_target": "label",
+ "doc_to_choice": "{{[premise+\", ถูกต้อง? ใช่, \"+hypothesis,premise+\", ถูกต้อง? ดังนั้น, \"+hypothesis,premise+\", ถูกต้อง? ไม่, \"+hypothesis]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xnli_tr": {
+ "task": "xnli_tr",
+ "group": "xnli",
+ "dataset_path": "xnli",
+ "dataset_name": "tr",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "",
+ "doc_to_target": "label",
+ "doc_to_choice": "{{[premise+\", doğru? Evet, \"+hypothesis,premise+\", doğru? Böylece, \"+hypothesis,premise+\", doğru? Hayır, \"+hypothesis]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xnli_ur": {
+ "task": "xnli_ur",
+ "group": "xnli",
+ "dataset_path": "xnli",
+ "dataset_name": "ur",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "",
+ "doc_to_target": "label",
+ "doc_to_choice": "{{[premise+\", صحیح? جی ہاں, \"+hypothesis,premise+\", صحیح? اس لئے, \"+hypothesis,premise+\", صحیح? نہیں, \"+hypothesis]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xnli_vi": {
+ "task": "xnli_vi",
+ "group": "xnli",
+ "dataset_path": "xnli",
+ "dataset_name": "vi",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "",
+ "doc_to_target": "label",
+ "doc_to_choice": "{{[premise+\", đúng? Vâng, \"+hypothesis,premise+\", đúng? Vì vậy, \"+hypothesis,premise+\", đúng? Không, \"+hypothesis]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xnli_zh": {
+ "task": "xnli_zh",
+ "group": "xnli",
+ "dataset_path": "xnli",
+ "dataset_name": "zh",
+ "training_split": "train",
+ "validation_split": "validation",
+ "doc_to_text": "",
+ "doc_to_target": "label",
+ "doc_to_choice": "{{[premise+\", 正确? 是的, \"+hypothesis,premise+\", 正确? 所以, \"+hypothesis,premise+\", 正确? 不是的, \"+hypothesis]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "xnli": "N/A",
+ "xnli_ar": 1.0,
+ "xnli_bg": 1.0,
+ "xnli_de": 1.0,
+ "xnli_el": 1.0,
+ "xnli_en": 1.0,
+ "xnli_es": 1.0,
+ "xnli_fr": 1.0,
+ "xnli_hi": 1.0,
+ "xnli_ru": 1.0,
+ "xnli_sw": 1.0,
+ "xnli_th": 1.0,
+ "xnli_tr": 1.0,
+ "xnli_ur": 1.0,
+ "xnli_vi": 1.0,
+ "xnli_zh": 1.0
+ },
+ "n-shot": {
+ "xnli": 0,
+ "xnli_ar": 0,
+ "xnli_bg": 0,
+ "xnli_de": 0,
+ "xnli_el": 0,
+ "xnli_en": 0,
+ "xnli_es": 0,
+ "xnli_fr": 0,
+ "xnli_hi": 0,
+ "xnli_ru": 0,
+ "xnli_sw": 0,
+ "xnli_th": 0,
+ "xnli_tr": 0,
+ "xnli_ur": 0,
+ "xnli_vi": 0,
+ "xnli_zh": 0
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=SmerkyG/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True",
+ "batch_size": "auto",
+ "batch_sizes": [
+ 64
+ ],
+ "device": null,
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null
+ },
+ "git_hash": "1ee41f7"
+ }
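Each xnli subtask above scores three candidate strings built from one Jinja template, differing only in the language-specific connective for entailment, neutral, and contradiction. A Python rendering of the English template (the premise/hypothesis pair is hypothetical):

```python
def xnli_en_choices(premise: str, hypothesis: str) -> list:
    # Mirrors the en template: premise + ", right? Yes/Also/No, " + hypothesis,
    # for entailment, neutral, and contradiction respectively.
    return [f"{premise}, right? {c}, {hypothesis}" for c in ("Yes", "Also", "No")]

for s in xnli_en_choices("A man is playing a guitar", "A person is making music"):
    print(s)
```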
lm-eval-output/SmerkyG/rwkv-5-world-1b5/xnli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:714bc81b1a9b797bb95c1606b76f2e5cc9f714e987136ff3c92e61a8a301a04b
+ size 96028
lm-eval-output/SmerkyG/rwkv-5-world-1b5/xstorycloze/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
@@ -0,0 +1,423 @@
+ {
+ "results": {
+ "xstorycloze": {
+ "acc,none": 0.5785452138860477,
+ "acc_stderr,none": 0.046882211406773226,
+ "alias": "xstorycloze"
+ },
+ "xstorycloze_ar": {
+ "acc,none": 0.5373924553275976,
+ "acc_stderr,none": 0.012831093347016556,
+ "alias": " - xstorycloze_ar"
+ },
+ "xstorycloze_en": {
+ "acc,none": 0.7200529450694904,
+ "acc_stderr,none": 0.011553982180012723,
+ "alias": " - xstorycloze_en"
+ },
+ "xstorycloze_es": {
+ "acc,none": 0.6293845135671741,
+ "acc_stderr,none": 0.012428861084065901,
+ "alias": " - xstorycloze_es"
+ },
+ "xstorycloze_eu": {
+ "acc,none": 0.5334215751158173,
+ "acc_stderr,none": 0.01283834793473167,
+ "alias": " - xstorycloze_eu"
+ },
+ "xstorycloze_hi": {
+ "acc,none": 0.5407015221707479,
+ "acc_stderr,none": 0.012824422739625585,
+ "alias": " - xstorycloze_hi"
+ },
+ "xstorycloze_id": {
+ "acc,none": 0.614824619457313,
+ "acc_stderr,none": 0.012523231571141184,
+ "alias": " - xstorycloze_id"
+ },
+ "xstorycloze_my": {
+ "acc,none": 0.49172733289212445,
+ "acc_stderr,none": 0.012865364020375396,
+ "alias": " - xstorycloze_my"
+ },
+ "xstorycloze_ru": {
+ "acc,none": 0.6207809397749835,
+ "acc_stderr,none": 0.012486070771171334,
+ "alias": " - xstorycloze_ru"
+ },
+ "xstorycloze_sw": {
+ "acc,none": 0.5115817339510258,
+ "acc_stderr,none": 0.012863672949335879,
+ "alias": " - xstorycloze_sw"
+ },
+ "xstorycloze_te": {
+ "acc,none": 0.5691594970218399,
+ "acc_stderr,none": 0.012743443034698407,
+ "alias": " - xstorycloze_te"
+ },
+ "xstorycloze_zh": {
+ "acc,none": 0.5949702183984117,
+ "acc_stderr,none": 0.01263288721875138,
+ "alias": " - xstorycloze_zh"
+ }
+ },
+ "groups": {
+ "xstorycloze": {
+ "acc,none": 0.5785452138860477,
+ "acc_stderr,none": 0.046882211406773226,
+ "alias": "xstorycloze"
+ }
+ },
+ "configs": {
+ "xstorycloze_ar": {
+ "task": "xstorycloze_ar",
+ "group": "xstorycloze",
+ "dataset_path": "juletxara/xstory_cloze",
+ "dataset_name": "ar",
+ "training_split": "train",
+ "validation_split": "eval",
+ "doc_to_text": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
+ "doc_to_target": "{{answer_right_ending-1}}",
+ "doc_to_choice": "{{[sentence_quiz1, sentence_quiz2]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xstorycloze_en": {
+ "task": "xstorycloze_en",
+ "group": "xstorycloze",
+ "dataset_path": "juletxara/xstory_cloze",
+ "dataset_name": "en",
+ "training_split": "train",
+ "validation_split": "eval",
+ "doc_to_text": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
+ "doc_to_target": "{{answer_right_ending-1}}",
+ "doc_to_choice": "{{[sentence_quiz1, sentence_quiz2]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xstorycloze_es": {
+ "task": "xstorycloze_es",
+ "group": "xstorycloze",
+ "dataset_path": "juletxara/xstory_cloze",
+ "dataset_name": "es",
+ "training_split": "train",
+ "validation_split": "eval",
+ "doc_to_text": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
+ "doc_to_target": "{{answer_right_ending-1}}",
+ "doc_to_choice": "{{[sentence_quiz1, sentence_quiz2]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xstorycloze_eu": {
+ "task": "xstorycloze_eu",
+ "group": "xstorycloze",
+ "dataset_path": "juletxara/xstory_cloze",
+ "dataset_name": "eu",
+ "training_split": "train",
+ "validation_split": "eval",
+ "doc_to_text": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
+ "doc_to_target": "{{answer_right_ending-1}}",
+ "doc_to_choice": "{{[sentence_quiz1, sentence_quiz2]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xstorycloze_hi": {
+ "task": "xstorycloze_hi",
+ "group": "xstorycloze",
+ "dataset_path": "juletxara/xstory_cloze",
+ "dataset_name": "hi",
+ "training_split": "train",
+ "validation_split": "eval",
+ "doc_to_text": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
+ "doc_to_target": "{{answer_right_ending-1}}",
+ "doc_to_choice": "{{[sentence_quiz1, sentence_quiz2]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xstorycloze_id": {
+ "task": "xstorycloze_id",
+ "group": "xstorycloze",
+ "dataset_path": "juletxara/xstory_cloze",
+ "dataset_name": "id",
+ "training_split": "train",
+ "validation_split": "eval",
+ "doc_to_text": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
+ "doc_to_target": "{{answer_right_ending-1}}",
+ "doc_to_choice": "{{[sentence_quiz1, sentence_quiz2]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xstorycloze_my": {
+ "task": "xstorycloze_my",
+ "group": "xstorycloze",
+ "dataset_path": "juletxara/xstory_cloze",
+ "dataset_name": "my",
+ "training_split": "train",
+ "validation_split": "eval",
+ "doc_to_text": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
+ "doc_to_target": "{{answer_right_ending-1}}",
+ "doc_to_choice": "{{[sentence_quiz1, sentence_quiz2]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xstorycloze_ru": {
+ "task": "xstorycloze_ru",
+ "group": "xstorycloze",
+ "dataset_path": "juletxara/xstory_cloze",
+ "dataset_name": "ru",
+ "training_split": "train",
+ "validation_split": "eval",
+ "doc_to_text": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
+ "doc_to_target": "{{answer_right_ending-1}}",
+ "doc_to_choice": "{{[sentence_quiz1, sentence_quiz2]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xstorycloze_sw": {
+ "task": "xstorycloze_sw",
+ "group": "xstorycloze",
+ "dataset_path": "juletxara/xstory_cloze",
+ "dataset_name": "sw",
+ "training_split": "train",
+ "validation_split": "eval",
+ "doc_to_text": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
+ "doc_to_target": "{{answer_right_ending-1}}",
+ "doc_to_choice": "{{[sentence_quiz1, sentence_quiz2]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xstorycloze_te": {
+ "task": "xstorycloze_te",
+ "group": "xstorycloze",
+ "dataset_path": "juletxara/xstory_cloze",
+ "dataset_name": "te",
+ "training_split": "train",
+ "validation_split": "eval",
+ "doc_to_text": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
+ "doc_to_target": "{{answer_right_ending-1}}",
+ "doc_to_choice": "{{[sentence_quiz1, sentence_quiz2]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xstorycloze_zh": {
+ "task": "xstorycloze_zh",
+ "group": "xstorycloze",
+ "dataset_path": "juletxara/xstory_cloze",
+ "dataset_name": "zh",
+ "training_split": "train",
+ "validation_split": "eval",
+ "doc_to_text": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
+ "doc_to_target": "{{answer_right_ending-1}}",
+ "doc_to_choice": "{{[sentence_quiz1, sentence_quiz2]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{[input_sentence_1, input_sentence_2, input_sentence_3, input_sentence_4]|join(' ')}}",
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "xstorycloze": "N/A",
+ "xstorycloze_ar": 1.0,
+ "xstorycloze_en": 1.0,
+ "xstorycloze_es": 1.0,
+ "xstorycloze_eu": 1.0,
+ "xstorycloze_hi": 1.0,
+ "xstorycloze_id": 1.0,
+ "xstorycloze_my": 1.0,
+ "xstorycloze_ru": 1.0,
+ "xstorycloze_sw": 1.0,
+ "xstorycloze_te": 1.0,
+ "xstorycloze_zh": 1.0
+ },
+ "n-shot": {
+ "xstorycloze": 0,
+ "xstorycloze_ar": 0,
+ "xstorycloze_en": 0,
+ "xstorycloze_es": 0,
+ "xstorycloze_eu": 0,
+ "xstorycloze_hi": 0,
+ "xstorycloze_id": 0,
+ "xstorycloze_my": 0,
+ "xstorycloze_ru": 0,
+ "xstorycloze_sw": 0,
+ "xstorycloze_te": 0,
+ "xstorycloze_zh": 0
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=SmerkyG/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True",
+ "batch_size": "auto",
+ "batch_sizes": [
+ 64
+ ],
+ "device": null,
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null
+ },
+ "git_hash": "1ee41f7"
+ }
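All eleven xstorycloze configs share the same field templates: the context joins the four story sentences with spaces, the gold index is `answer_right_ending-1`, and the two quiz endings are the choices. A Python equivalent of those Jinja expressions (the `doc` dict stands in for a dataset row and is hypothetical):

```python
def doc_to_text(doc):
    # {{[input_sentence_1, ..., input_sentence_4]|join(' ')}}
    return " ".join(doc[f"input_sentence_{i}"] for i in range(1, 5))

def doc_to_target(doc):
    # {{answer_right_ending-1}}: the dataset labels endings 1/2; the harness wants 0/1.
    return doc["answer_right_ending"] - 1

def doc_to_choice(doc):
    return [doc["sentence_quiz1"], doc["sentence_quiz2"]]

doc = {"input_sentence_1": "Ana baked a cake.",
       "input_sentence_2": "She forgot the sugar.",
       "input_sentence_3": "It tasted bland.",
       "input_sentence_4": "She tried once more.",
       "sentence_quiz1": "The second cake came out sweet.",
       "sentence_quiz2": "She sold her oven.",
       "answer_right_ending": 1}
print(doc_to_text(doc), "->", doc_to_choice(doc)[doc_to_target(doc)])
```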
lm-eval-output/SmerkyG/rwkv-5-world-1b5/xstorycloze/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:02c0a35886054983b3006e85f8ec40648b976eb0097ebdb4f54a1d47588b72b0
+ size 66269
lm-eval-output/SmerkyG/rwkv-5-world-1b5/xwinograd/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
@@ -0,0 +1,248 @@
+ {
+ "results": {
+ "xwinograd": {
+ "acc,none": 0.731175545066307,
+ "acc_stderr,none": 0.04568831187382474,
+ "alias": "xwinograd"
+ },
+ "xwinograd_en": {
+ "acc,none": 0.8094623655913978,
+ "acc_stderr,none": 0.008146492341553319,
+ "alias": " - xwinograd_en"
+ },
+ "xwinograd_fr": {
+ "acc,none": 0.7108433734939759,
+ "acc_stderr,none": 0.050066428050419214,
+ "alias": " - xwinograd_fr"
+ },
+ "xwinograd_jp": {
+ "acc,none": 0.6068821689259646,
+ "acc_stderr,none": 0.015780865040470965,
+ "alias": " - xwinograd_jp"
+ },
+ "xwinograd_pt": {
+ "acc,none": 0.6577946768060836,
+ "acc_stderr,none": 0.029311491114275143,
+ "alias": " - xwinograd_pt"
+ },
+ "xwinograd_ru": {
+ "acc,none": 0.6507936507936508,
+ "acc_stderr,none": 0.026902825537698707,
+ "alias": " - xwinograd_ru"
+ },
+ "xwinograd_zh": {
+ "acc,none": 0.6984126984126984,
+ "acc_stderr,none": 0.02046343784622378,
+ "alias": " - xwinograd_zh"
+ }
+ },
+ "groups": {
+ "xwinograd": {
+ "acc,none": 0.731175545066307,
+ "acc_stderr,none": 0.04568831187382474,
+ "alias": "xwinograd"
+ }
+ },
+ "configs": {
+ "xwinograd_en": {
+ "task": "xwinograd_en",
+ "group": [
+ "xwinograd"
+ ],
+ "dataset_path": "Muennighoff/xwinograd",
+ "dataset_name": "en",
+ "test_split": "test",
+ "doc_to_text": "def doc_to_text(doc: Dict) -> int:\n \"\"\"\n Return index of the correct choice.\n\n Note: We are using the \"multiple input\" mode of the multiple-choice\n output-type, which means we use different contexts with the same target\n for the different choices, rather than the same context and different targets.\n \"\"\"\n answer_to_num = {\"1\": 0, \"2\": 1}\n return answer_to_num[doc[\"answer\"]]\n",
+ "doc_to_target": "def doc_to_target(doc: Dict) -> str:\n \"\"\"\n Return the target completion.\n\n Note that this does not depend on the correct choice as we are using\n \"multiple input\" mode.\n \"\"\"\n idx = doc[\"sentence\"].index(\"_\") + 1\n return doc[\"sentence\"][idx:].strip()\n",
+ "doc_to_choice": "def doc_to_choice(doc: Dict) -> List[str]:\n \"\"\"Return the choices that will be used as contexts in \"multiple input\" mode.\"\"\"\n idx = doc[\"sentence\"].index(\"_\")\n options = [doc[\"option1\"], doc[\"option2\"]]\n return [doc[\"sentence\"][:idx] + opt for opt in options]\n",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xwinograd_fr": {
+ "task": "xwinograd_fr",
+ "group": [
+ "xwinograd"
+ ],
+ "dataset_path": "Muennighoff/xwinograd",
+ "dataset_name": "fr",
+ "test_split": "test",
+ "doc_to_text": "def doc_to_text(doc: Dict) -> int:\n \"\"\"\n Return index of the correct choice.\n\n Note: We are using the \"multiple input\" mode of the multiple-choice\n output-type, which means we use different contexts with the same target\n for the different choices, rather than the same context and different targets.\n \"\"\"\n answer_to_num = {\"1\": 0, \"2\": 1}\n return answer_to_num[doc[\"answer\"]]\n",
+ "doc_to_target": "def doc_to_target(doc: Dict) -> str:\n \"\"\"\n Return the target completion.\n\n Note that this does not depend on the correct choice as we are using\n \"multiple input\" mode.\n \"\"\"\n idx = doc[\"sentence\"].index(\"_\") + 1\n return doc[\"sentence\"][idx:].strip()\n",
+ "doc_to_choice": "def doc_to_choice(doc: Dict) -> List[str]:\n \"\"\"Return the choices that will be used as contexts in \"multiple input\" mode.\"\"\"\n idx = doc[\"sentence\"].index(\"_\")\n options = [doc[\"option1\"], doc[\"option2\"]]\n return [doc[\"sentence\"][:idx] + opt for opt in options]\n",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xwinograd_jp": {
+ "task": "xwinograd_jp",
+ "group": [
+ "xwinograd"
+ ],
+ "dataset_path": "Muennighoff/xwinograd",
+ "dataset_name": "jp",
+ "test_split": "test",
+ "doc_to_text": "def doc_to_text(doc: Dict) -> int:\n \"\"\"\n Return index of the correct choice.\n\n Note: We are using the \"multiple input\" mode of the multiple-choice\n output-type, which means we use different contexts with the same target\n for the different choices, rather than the same context and different targets.\n \"\"\"\n answer_to_num = {\"1\": 0, \"2\": 1}\n return answer_to_num[doc[\"answer\"]]\n",
+ "doc_to_target": "def doc_to_target(doc: Dict) -> str:\n \"\"\"\n Return the target completion.\n\n Note that this does not depend on the correct choice as we are using\n \"multiple input\" mode.\n \"\"\"\n idx = doc[\"sentence\"].index(\"_\") + 1\n return doc[\"sentence\"][idx:].strip()\n",
+ "doc_to_choice": "def doc_to_choice(doc: Dict) -> List[str]:\n \"\"\"Return the choices that will be used as contexts in \"multiple input\" mode.\"\"\"\n idx = doc[\"sentence\"].index(\"_\")\n options = [doc[\"option1\"], doc[\"option2\"]]\n return [doc[\"sentence\"][:idx] + opt for opt in options]\n",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xwinograd_pt": {
+ "task": "xwinograd_pt",
+ "group": [
+ "xwinograd"
+ ],
+ "dataset_path": "Muennighoff/xwinograd",
+ "dataset_name": "pt",
+ "test_split": "test",
+ "doc_to_text": "def doc_to_text(doc: Dict) -> int:\n \"\"\"\n Return index of the correct choice.\n\n Note: We are using the \"multiple input\" mode of the multiple-choice\n output-type, which means we use different contexts with the same target\n for the different choices, rather than the same context and different targets.\n \"\"\"\n answer_to_num = {\"1\": 0, \"2\": 1}\n return answer_to_num[doc[\"answer\"]]\n",
+ "doc_to_target": "def doc_to_target(doc: Dict) -> str:\n \"\"\"\n Return the target completion.\n\n Note that this does not depend on the correct choice as we are using\n \"multiple input\" mode.\n \"\"\"\n idx = doc[\"sentence\"].index(\"_\") + 1\n return doc[\"sentence\"][idx:].strip()\n",
+ "doc_to_choice": "def doc_to_choice(doc: Dict) -> List[str]:\n \"\"\"Return the choices that will be used as contexts in \"multiple input\" mode.\"\"\"\n idx = doc[\"sentence\"].index(\"_\")\n options = [doc[\"option1\"], doc[\"option2\"]]\n return [doc[\"sentence\"][:idx] + opt for opt in options]\n",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xwinograd_ru": {
+ "task": "xwinograd_ru",
+ "group": [
+ "xwinograd"
+ ],
+ "dataset_path": "Muennighoff/xwinograd",
+ "dataset_name": "ru",
+ "test_split": "test",
+ "doc_to_text": "def doc_to_text(doc: Dict) -> int:\n \"\"\"\n Return index of the correct choice.\n\n Note: We are using the \"multiple input\" mode of the multiple-choice\n output-type, which means we use different contexts with the same target\n for the different choices, rather than the same context and different targets.\n \"\"\"\n answer_to_num = {\"1\": 0, \"2\": 1}\n return answer_to_num[doc[\"answer\"]]\n",
+ "doc_to_target": "def doc_to_target(doc: Dict) -> str:\n \"\"\"\n Return the target completion.\n\n Note that this does not depend on the correct choice as we are using\n \"multiple input\" mode.\n \"\"\"\n idx = doc[\"sentence\"].index(\"_\") + 1\n return doc[\"sentence\"][idx:].strip()\n",
+ "doc_to_choice": "def doc_to_choice(doc: Dict) -> List[str]:\n \"\"\"Return the choices that will be used as contexts in \"multiple input\" mode.\"\"\"\n idx = doc[\"sentence\"].index(\"_\")\n options = [doc[\"option1\"], doc[\"option2\"]]\n return [doc[\"sentence\"][:idx] + opt for opt in options]\n",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "xwinograd_zh": {
+ "task": "xwinograd_zh",
+ "group": [
+ "xwinograd"
+ ],
+ "dataset_path": "Muennighoff/xwinograd",
+ "dataset_name": "zh",
+ "test_split": "test",
+ "doc_to_text": "def doc_to_text(doc: Dict) -> int:\n \"\"\"\n Return index of the correct choice.\n\n Note: We are using the \"multiple input\" mode of the multiple-choice\n output-type, which means we use different contexts with the same target\n for the different choices, rather than the same context and different targets.\n \"\"\"\n answer_to_num = {\"1\": 0, \"2\": 1}\n return answer_to_num[doc[\"answer\"]]\n",
+ "doc_to_target": "def doc_to_target(doc: Dict) -> str:\n \"\"\"\n Return the target completion.\n\n Note that this does not depend on the correct choice as we are using\n \"multiple input\" mode.\n \"\"\"\n idx = doc[\"sentence\"].index(\"_\") + 1\n return doc[\"sentence\"][idx:].strip()\n",
+ "doc_to_choice": "def doc_to_choice(doc: Dict) -> List[str]:\n \"\"\"Return the choices that will be used as contexts in \"multiple input\" mode.\"\"\"\n idx = doc[\"sentence\"].index(\"_\")\n options = [doc[\"option1\"], doc[\"option2\"]]\n return [doc[\"sentence\"][:idx] + opt for opt in options]\n",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "metric_list": [
+ {
+ "metric": "acc",
+ "aggregation": "mean",
+ "higher_is_better": true
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": false,
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "xwinograd": "N/A",
+ "xwinograd_en": 1.0,
+ "xwinograd_fr": 1.0,
+ "xwinograd_jp": 1.0,
+ "xwinograd_pt": 1.0,
+ "xwinograd_ru": 1.0,
+ "xwinograd_zh": 1.0
+ },
+ "n-shot": {
+ "xwinograd": 0,
+ "xwinograd_en": 0,
+ "xwinograd_fr": 0,
+ "xwinograd_jp": 0,
+ "xwinograd_pt": 0,
+ "xwinograd_ru": 0,
+ "xwinograd_zh": 0
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=SmerkyG/rwkv-5-world-1b5,dtype=bfloat16,trust_remote_code=True",
+ "batch_size": "auto",
+ "batch_sizes": [
+ 64
+ ],
+ "device": null,
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null
+ },
+ "git_hash": "1ee41f7"
+ }
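Unlike xcopa, the xwinograd subtasks differ widely in size, so the group accuracy above is the example-weighted (pooled) mean rather than the plain mean of the six per-language scores. Using the test-split sizes of Muennighoff/xwinograd (assumed here from the public dataset; the arithmetic below reproduces the reported number exactly):

```python
# (accuracy, assumed test-set size) per language; accuracies from the results block.
scores = {"en": (0.8094623655913978, 2325), "fr": (0.7108433734939759, 83),
          "jp": (0.6068821689259646, 959),  "pt": (0.6577946768060836, 263),
          "ru": (0.6507936507936508, 315),  "zh": (0.6984126984126984, 504)}
correct = sum(acc * n for acc, n in scores.values())
total = sum(n for _, n in scores.values())
print(correct / total)  # 0.731175545... -> the reported group "acc,none"
```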
lm-eval-output/SmerkyG/rwkv-5-world-1b5/xwinograd/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0cca47945ffd0b9dfffb5f897ba2af5cbc65a3f2b722ed9d9889eb25c470e80a
3
+ size 60297
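The taskrun.log entries in this commit are stored as Git LFS pointer stubs: three lines giving the spec version, the sha256 oid of the real log, and its byte size. A small sketch, assuming only the first-line format of the LFS pointer spec, that tells pointer stubs apart from materialized logs in a local checkout:

from pathlib import Path

def is_lfs_pointer(path: str) -> bool:
    """True if the file still holds a Git LFS pointer stub rather than content."""
    lines = Path(path).read_text(errors="ignore").splitlines()
    return bool(lines) and lines[0].startswith("version https://git-lfs.github.com/spec/v1")

# Hypothetical usage on a checkout of this repo: returns True for any
# taskrun.log until the actual blobs are fetched with `git lfs pull`.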
lm-eval-output/SmerkyG/rwkv-5-world-3b/ai2_arc/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
@@ -0,0 +1,132 @@
1
+ {
2
+ "results": {
3
+ "ai2_arc": {
4
+ "acc,none": 0.5727170236753101,
5
+ "acc_stderr,none": 0.10942748330722392,
6
+ "acc_norm,none": 0.547914317925592,
7
+ "acc_norm_stderr,none": 0.08710699872372187,
8
+ "alias": "ai2_arc"
9
+ },
10
+ "arc_challenge": {
11
+ "acc,none": 0.3412969283276451,
12
+ "acc_stderr,none": 0.013855831287497728,
13
+ "acc_norm,none": 0.3643344709897611,
14
+ "acc_norm_stderr,none": 0.014063260279882413,
15
+ "alias": " - arc_challenge"
16
+ },
17
+ "arc_easy": {
18
+ "acc,none": 0.6868686868686869,
19
+ "acc_stderr,none": 0.00951630387930954,
20
+ "acc_norm,none": 0.6384680134680135,
21
+ "acc_norm_stderr,none": 0.00985850654316206,
22
+ "alias": " - arc_easy"
23
+ }
24
+ },
25
+ "groups": {
26
+ "ai2_arc": {
27
+ "acc,none": 0.5727170236753101,
28
+ "acc_stderr,none": 0.10942748330722392,
29
+ "acc_norm,none": 0.547914317925592,
30
+ "acc_norm_stderr,none": 0.08710699872372187,
31
+ "alias": "ai2_arc"
32
+ }
33
+ },
34
+ "configs": {
35
+ "arc_challenge": {
36
+ "task": "arc_challenge",
37
+ "group": [
38
+ "ai2_arc"
39
+ ],
40
+ "dataset_path": "allenai/ai2_arc",
41
+ "dataset_name": "ARC-Challenge",
42
+ "training_split": "train",
43
+ "validation_split": "validation",
44
+ "test_split": "test",
45
+ "doc_to_text": "Question: {{question}}\nAnswer:",
46
+ "doc_to_target": "{{choices.label.index(answerKey)}}",
47
+ "doc_to_choice": "{{choices.text}}",
48
+ "description": "",
49
+ "target_delimiter": " ",
50
+ "fewshot_delimiter": "\n\n",
51
+ "metric_list": [
52
+ {
53
+ "metric": "acc",
54
+ "aggregation": "mean",
55
+ "higher_is_better": true
56
+ },
57
+ {
58
+ "metric": "acc_norm",
59
+ "aggregation": "mean",
60
+ "higher_is_better": true
61
+ }
62
+ ],
63
+ "output_type": "multiple_choice",
64
+ "repeats": 1,
65
+ "should_decontaminate": true,
66
+ "doc_to_decontamination_query": "Question: {{question}}\nAnswer:",
67
+ "metadata": {
68
+ "version": 1.0
69
+ }
70
+ },
71
+ "arc_easy": {
72
+ "task": "arc_easy",
73
+ "group": [
74
+ "ai2_arc"
75
+ ],
76
+ "dataset_path": "allenai/ai2_arc",
77
+ "dataset_name": "ARC-Easy",
78
+ "training_split": "train",
79
+ "validation_split": "validation",
80
+ "test_split": "test",
81
+ "doc_to_text": "Question: {{question}}\nAnswer:",
82
+ "doc_to_target": "{{choices.label.index(answerKey)}}",
83
+ "doc_to_choice": "{{choices.text}}",
84
+ "description": "",
85
+ "target_delimiter": " ",
86
+ "fewshot_delimiter": "\n\n",
87
+ "metric_list": [
88
+ {
89
+ "metric": "acc",
90
+ "aggregation": "mean",
91
+ "higher_is_better": true
92
+ },
93
+ {
94
+ "metric": "acc_norm",
95
+ "aggregation": "mean",
96
+ "higher_is_better": true
97
+ }
98
+ ],
99
+ "output_type": "multiple_choice",
100
+ "repeats": 1,
101
+ "should_decontaminate": true,
102
+ "doc_to_decontamination_query": "Question: {{question}}\nAnswer:",
103
+ "metadata": {
104
+ "version": 1.0
105
+ }
106
+ }
107
+ },
108
+ "versions": {
109
+ "ai2_arc": "N/A",
110
+ "arc_challenge": 1.0,
111
+ "arc_easy": 1.0
112
+ },
113
+ "n-shot": {
114
+ "ai2_arc": 0,
115
+ "arc_challenge": 0,
116
+ "arc_easy": 0
117
+ },
118
+ "config": {
119
+ "model": "hf",
120
+ "model_args": "pretrained=SmerkyG/rwkv-5-world-3b,dtype=bfloat16,trust_remote_code=True",
121
+ "batch_size": "auto",
122
+ "batch_sizes": [
123
+ 64
124
+ ],
125
+ "device": null,
126
+ "use_cache": null,
127
+ "limit": null,
128
+ "bootstrap_iters": 100000,
129
+ "gen_kwargs": null
130
+ },
131
+ "git_hash": "1ee41f7"
132
+ }
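The "config" block of each results.json records what is needed to rerun the evaluation. A hedged sketch of reproducing the ai2_arc run above, assuming the lm-evaluation-harness v0.4-style Python entry point lm_eval.simple_evaluate, with arguments mirroring the recorded model_args and batch size:

# Reproduction sketch for the ai2_arc run recorded above; assumes the
# lm-evaluation-harness v0.4 API (lm_eval.simple_evaluate). Arguments
# mirror the "config" block of the results.json.
import json
import lm_eval

out = lm_eval.simple_evaluate(
    model="hf",
    model_args="pretrained=SmerkyG/rwkv-5-world-3b,dtype=bfloat16,trust_remote_code=True",
    tasks=["ai2_arc"],
    batch_size="auto",
)
print(json.dumps(out["results"], indent=2))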
lm-eval-output/SmerkyG/rwkv-5-world-3b/ai2_arc/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:01683ce8c83a5edff112e6bc70781594ed00d27b6d09eff636ff46d4b8cb678b
3
+ size 47723
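All results.json files in this commit share one shape: a "results" map from task name to metric values keyed like "acc,none" and "acc_stderr,none". A short loader that tabulates them, assuming a local checkout of this repo (the path is one of the files added in this commit):

import json

# Path of one of the files added in this commit, relative to a local checkout.
path = ("lm-eval-output/SmerkyG/rwkv-5-world-3b/anli/"
        "dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json")

with open(path) as f:
    report = json.load(f)

for task, metrics in sorted(report["results"].items()):
    acc = metrics.get("acc,none")
    err = metrics.get("acc_stderr,none", 0.0)
    if acc is not None:
        print(f"{task:12s} acc={acc:.4f} +/- {err:.4f}")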
lm-eval-output/SmerkyG/rwkv-5-world-3b/anli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
@@ -0,0 +1,161 @@
1
+ {
2
+ "results": {
3
+ "anli": {
4
+ "acc,none": 0.34375,
5
+ "acc_stderr,none": 0.01498089438146567,
6
+ "alias": "anli"
7
+ },
8
+ "anli_r1": {
9
+ "acc,none": 0.346,
10
+ "acc_stderr,none": 0.015050266127564436,
11
+ "alias": " - anli_r1"
12
+ },
13
+ "anli_r2": {
14
+ "acc,none": 0.351,
15
+ "acc_stderr,none": 0.015100563798316407,
16
+ "alias": " - anli_r2"
17
+ },
18
+ "anli_r3": {
19
+ "acc,none": 0.3358333333333333,
20
+ "acc_stderr,none": 0.013639261190932879,
21
+ "alias": " - anli_r3"
22
+ }
23
+ },
24
+ "groups": {
25
+ "anli": {
26
+ "acc,none": 0.34375,
27
+ "acc_stderr,none": 0.01498089438146567,
28
+ "alias": "anli"
29
+ }
30
+ },
31
+ "configs": {
32
+ "anli_r1": {
33
+ "task": "anli_r1",
34
+ "group": [
35
+ "anli"
36
+ ],
37
+ "dataset_path": "anli",
38
+ "training_split": "train_r1",
39
+ "validation_split": "dev_r1",
40
+ "test_split": "test_r1",
41
+ "doc_to_text": "{{premise}}\nQuestion: {{hypothesis}} True, False, or Neither?\nAnswer:",
42
+ "doc_to_target": "{{['True', 'Neither', 'False'][label]}}",
43
+ "doc_to_choice": [
44
+ "True",
45
+ "Neither",
46
+ "False"
47
+ ],
48
+ "description": "",
49
+ "target_delimiter": " ",
50
+ "fewshot_delimiter": "\n\n",
51
+ "metric_list": [
52
+ {
53
+ "metric": "acc",
54
+ "aggregation": "mean",
55
+ "higher_is_better": true
56
+ }
57
+ ],
58
+ "output_type": "multiple_choice",
59
+ "repeats": 1,
60
+ "should_decontaminate": true,
61
+ "doc_to_decontamination_query": "premise",
62
+ "metadata": {
63
+ "version": 1.0
64
+ }
65
+ },
66
+ "anli_r2": {
67
+ "task": "anli_r2",
68
+ "group": [
69
+ "anli"
70
+ ],
71
+ "dataset_path": "anli",
72
+ "training_split": "train_r2",
73
+ "validation_split": "dev_r2",
74
+ "test_split": "test_r2",
75
+ "doc_to_text": "{{premise}}\nQuestion: {{hypothesis}} True, False, or Neither?\nAnswer:",
76
+ "doc_to_target": "{{['True', 'Neither', 'False'][label]}}",
77
+ "doc_to_choice": [
78
+ "True",
79
+ "Neither",
80
+ "False"
81
+ ],
82
+ "description": "",
83
+ "target_delimiter": " ",
84
+ "fewshot_delimiter": "\n\n",
85
+ "metric_list": [
86
+ {
87
+ "metric": "acc",
88
+ "aggregation": "mean",
89
+ "higher_is_better": true
90
+ }
91
+ ],
92
+ "output_type": "multiple_choice",
93
+ "repeats": 1,
94
+ "should_decontaminate": true,
95
+ "doc_to_decontamination_query": "premise",
96
+ "metadata": {
97
+ "version": 1.0
98
+ }
99
+ },
100
+ "anli_r3": {
101
+ "task": "anli_r3",
102
+ "group": [
103
+ "anli"
104
+ ],
105
+ "dataset_path": "anli",
106
+ "training_split": "train_r3",
107
+ "validation_split": "dev_r3",
108
+ "test_split": "test_r3",
109
+ "doc_to_text": "{{premise}}\nQuestion: {{hypothesis}} True, False, or Neither?\nAnswer:",
110
+ "doc_to_target": "{{['True', 'Neither', 'False'][label]}}",
111
+ "doc_to_choice": [
112
+ "True",
113
+ "Neither",
114
+ "False"
115
+ ],
116
+ "description": "",
117
+ "target_delimiter": " ",
118
+ "fewshot_delimiter": "\n\n",
119
+ "metric_list": [
120
+ {
121
+ "metric": "acc",
122
+ "aggregation": "mean",
123
+ "higher_is_better": true
124
+ }
125
+ ],
126
+ "output_type": "multiple_choice",
127
+ "repeats": 1,
128
+ "should_decontaminate": true,
129
+ "doc_to_decontamination_query": "premise",
130
+ "metadata": {
131
+ "version": 1.0
132
+ }
133
+ }
134
+ },
135
+ "versions": {
136
+ "anli": "N/A",
137
+ "anli_r1": 1.0,
138
+ "anli_r2": 1.0,
139
+ "anli_r3": 1.0
140
+ },
141
+ "n-shot": {
142
+ "anli": 0,
143
+ "anli_r1": 0,
144
+ "anli_r2": 0,
145
+ "anli_r3": 0
146
+ },
147
+ "config": {
148
+ "model": "hf",
149
+ "model_args": "pretrained=SmerkyG/rwkv-5-world-3b,dtype=bfloat16,trust_remote_code=True",
150
+ "batch_size": "auto",
151
+ "batch_sizes": [
152
+ 64
153
+ ],
154
+ "device": null,
155
+ "use_cache": null,
156
+ "limit": null,
157
+ "bootstrap_iters": 100000,
158
+ "gen_kwargs": null
159
+ },
160
+ "git_hash": "1ee41f7"
161
+ }
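The group-level anli accuracy in this file is the example-weighted pool of the three rounds. Taking the standard ANLI test-split sizes (1,000 items each for r1 and r2, 1,200 for r3; an assumption, since the sizes are not recorded in the JSON itself), the per-round accuracies above reproduce the recorded 0.34375 exactly:

# Re-derive the pooled anli accuracy from the per-round numbers above.
# Split sizes are the standard ANLI test rounds: 1000 / 1000 / 1200.
rounds = {
    "anli_r1": (0.346, 1000),
    "anli_r2": (0.351, 1000),
    "anli_r3": (0.3358333333333333, 1200),
}

hits = sum(acc * n for acc, n in rounds.values())
total = sum(n for _, n in rounds.values())
print(round(hits / total, 5))  # 0.34375, matching the group "acc,none"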
lm-eval-output/SmerkyG/rwkv-5-world-3b/anli/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9c7f6f7ac954cae5ee247cabc31835d6835e1dd702a1eeda34bced4dc7cede4a
3
+ size 69276
lm-eval-output/SmerkyG/rwkv-5-world-3b/blimp/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json ADDED
@@ -0,0 +1,2249 @@
1
+ {
2
+ "results": {
3
+ "blimp": {
4
+ "acc,none": 0.8394328358208956,
5
+ "acc_stderr,none": 0.13653720128092459,
6
+ "alias": "blimp"
7
+ },
8
+ "blimp_adjunct_island": {
9
+ "acc,none": 0.909,
10
+ "acc_stderr,none": 0.009099549538400236,
11
+ "alias": " - blimp_adjunct_island"
12
+ },
13
+ "blimp_anaphor_gender_agreement": {
14
+ "acc,none": 0.986,
15
+ "acc_stderr,none": 0.003717232548256562,
16
+ "alias": " - blimp_anaphor_gender_agreement"
17
+ },
18
+ "blimp_anaphor_number_agreement": {
19
+ "acc,none": 0.994,
20
+ "acc_stderr,none": 0.00244335219932984,
21
+ "alias": " - blimp_anaphor_number_agreement"
22
+ },
23
+ "blimp_animate_subject_passive": {
24
+ "acc,none": 0.804,
25
+ "acc_stderr,none": 0.012559527926707363,
26
+ "alias": " - blimp_animate_subject_passive"
27
+ },
28
+ "blimp_animate_subject_trans": {
29
+ "acc,none": 0.89,
30
+ "acc_stderr,none": 0.009899393819724447,
31
+ "alias": " - blimp_animate_subject_trans"
32
+ },
33
+ "blimp_causative": {
34
+ "acc,none": 0.765,
35
+ "acc_stderr,none": 0.01341472903024711,
36
+ "alias": " - blimp_causative"
37
+ },
38
+ "blimp_complex_NP_island": {
39
+ "acc,none": 0.707,
40
+ "acc_stderr,none": 0.014399942998441275,
41
+ "alias": " - blimp_complex_NP_island"
42
+ },
43
+ "blimp_coordinate_structure_constraint_complex_left_branch": {
44
+ "acc,none": 0.697,
45
+ "acc_stderr,none": 0.01453968371053524,
46
+ "alias": " - blimp_coordinate_structure_constraint_complex_left_branch"
47
+ },
48
+ "blimp_coordinate_structure_constraint_object_extraction": {
49
+ "acc,none": 0.868,
50
+ "acc_stderr,none": 0.010709373963528022,
51
+ "alias": " - blimp_coordinate_structure_constraint_object_extraction"
52
+ },
53
+ "blimp_determiner_noun_agreement_1": {
54
+ "acc,none": 0.99,
55
+ "acc_stderr,none": 0.0031480009386767667,
56
+ "alias": " - blimp_determiner_noun_agreement_1"
57
+ },
58
+ "blimp_determiner_noun_agreement_2": {
59
+ "acc,none": 0.984,
60
+ "acc_stderr,none": 0.003969856390319422,
61
+ "alias": " - blimp_determiner_noun_agreement_2"
62
+ },
63
+ "blimp_determiner_noun_agreement_irregular_1": {
64
+ "acc,none": 0.933,
65
+ "acc_stderr,none": 0.007910345983177549,
66
+ "alias": " - blimp_determiner_noun_agreement_irregular_1"
67
+ },
68
+ "blimp_determiner_noun_agreement_irregular_2": {
69
+ "acc,none": 0.934,
70
+ "acc_stderr,none": 0.007855297938697596,
71
+ "alias": " - blimp_determiner_noun_agreement_irregular_2"
72
+ },
73
+ "blimp_determiner_noun_agreement_with_adj_2": {
74
+ "acc,none": 0.964,
75
+ "acc_stderr,none": 0.005893957816165557,
76
+ "alias": " - blimp_determiner_noun_agreement_with_adj_2"
77
+ },
78
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": {
79
+ "acc,none": 0.915,
80
+ "acc_stderr,none": 0.008823426366942302,
81
+ "alias": " - blimp_determiner_noun_agreement_with_adj_irregular_1"
82
+ },
83
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": {
84
+ "acc,none": 0.93,
85
+ "acc_stderr,none": 0.00807249435832349,
86
+ "alias": " - blimp_determiner_noun_agreement_with_adj_irregular_2"
87
+ },
88
+ "blimp_determiner_noun_agreement_with_adjective_1": {
89
+ "acc,none": 0.98,
90
+ "acc_stderr,none": 0.004429403980178342,
91
+ "alias": " - blimp_determiner_noun_agreement_with_adjective_1"
92
+ },
93
+ "blimp_distractor_agreement_relational_noun": {
94
+ "acc,none": 0.884,
95
+ "acc_stderr,none": 0.01013146813875699,
96
+ "alias": " - blimp_distractor_agreement_relational_noun"
97
+ },
98
+ "blimp_distractor_agreement_relative_clause": {
99
+ "acc,none": 0.762,
100
+ "acc_stderr,none": 0.01347358666196722,
101
+ "alias": " - blimp_distractor_agreement_relative_clause"
102
+ },
103
+ "blimp_drop_argument": {
104
+ "acc,none": 0.814,
105
+ "acc_stderr,none": 0.012310790208412805,
106
+ "alias": " - blimp_drop_argument"
107
+ },
108
+ "blimp_ellipsis_n_bar_1": {
109
+ "acc,none": 0.852,
110
+ "acc_stderr,none": 0.011234866364235253,
111
+ "alias": " - blimp_ellipsis_n_bar_1"
112
+ },
113
+ "blimp_ellipsis_n_bar_2": {
114
+ "acc,none": 0.912,
115
+ "acc_stderr,none": 0.00896305396259208,
116
+ "alias": " - blimp_ellipsis_n_bar_2"
117
+ },
118
+ "blimp_existential_there_object_raising": {
119
+ "acc,none": 0.859,
120
+ "acc_stderr,none": 0.011010914595992441,
121
+ "alias": " - blimp_existential_there_object_raising"
122
+ },
123
+ "blimp_existential_there_quantifiers_1": {
124
+ "acc,none": 0.994,
125
+ "acc_stderr,none": 0.0024433521993298185,
126
+ "alias": " - blimp_existential_there_quantifiers_1"
127
+ },
128
+ "blimp_existential_there_quantifiers_2": {
129
+ "acc,none": 0.457,
130
+ "acc_stderr,none": 0.01576069159013639,
131
+ "alias": " - blimp_existential_there_quantifiers_2"
132
+ },
133
+ "blimp_existential_there_subject_raising": {
134
+ "acc,none": 0.905,
135
+ "acc_stderr,none": 0.009276910103103315,
136
+ "alias": " - blimp_existential_there_subject_raising"
137
+ },
138
+ "blimp_expletive_it_object_raising": {
139
+ "acc,none": 0.805,
140
+ "acc_stderr,none": 0.012535235623319327,
141
+ "alias": " - blimp_expletive_it_object_raising"
142
+ },
143
+ "blimp_inchoative": {
144
+ "acc,none": 0.743,
145
+ "acc_stderr,none": 0.013825416526895031,
146
+ "alias": " - blimp_inchoative"
147
+ },
148
+ "blimp_intransitive": {
149
+ "acc,none": 0.843,
150
+ "acc_stderr,none": 0.011510146979230189,
151
+ "alias": " - blimp_intransitive"
152
+ },
153
+ "blimp_irregular_past_participle_adjectives": {
154
+ "acc,none": 0.941,
155
+ "acc_stderr,none": 0.007454835650406725,
156
+ "alias": " - blimp_irregular_past_participle_adjectives"
157
+ },
158
+ "blimp_irregular_past_participle_verbs": {
159
+ "acc,none": 0.927,
160
+ "acc_stderr,none": 0.008230354715244075,
161
+ "alias": " - blimp_irregular_past_participle_verbs"
162
+ },
163
+ "blimp_irregular_plural_subject_verb_agreement_1": {
164
+ "acc,none": 0.932,
165
+ "acc_stderr,none": 0.007964887911291605,
166
+ "alias": " - blimp_irregular_plural_subject_verb_agreement_1"
167
+ },
168
+ "blimp_irregular_plural_subject_verb_agreement_2": {
169
+ "acc,none": 0.926,
170
+ "acc_stderr,none": 0.00828206451270415,
171
+ "alias": " - blimp_irregular_plural_subject_verb_agreement_2"
172
+ },
173
+ "blimp_left_branch_island_echo_question": {
174
+ "acc,none": 0.598,
175
+ "acc_stderr,none": 0.015512467135715075,
176
+ "alias": " - blimp_left_branch_island_echo_question"
177
+ },
178
+ "blimp_left_branch_island_simple_question": {
179
+ "acc,none": 0.834,
180
+ "acc_stderr,none": 0.011772110370812203,
181
+ "alias": " - blimp_left_branch_island_simple_question"
182
+ },
183
+ "blimp_matrix_question_npi_licensor_present": {
184
+ "acc,none": 0.581,
185
+ "acc_stderr,none": 0.015610338967577794,
186
+ "alias": " - blimp_matrix_question_npi_licensor_present"
187
+ },
188
+ "blimp_npi_present_1": {
189
+ "acc,none": 0.626,
190
+ "acc_stderr,none": 0.015308767369006366,
191
+ "alias": " - blimp_npi_present_1"
192
+ },
193
+ "blimp_npi_present_2": {
194
+ "acc,none": 0.716,
195
+ "acc_stderr,none": 0.014267009061031309,
196
+ "alias": " - blimp_npi_present_2"
197
+ },
198
+ "blimp_only_npi_licensor_present": {
199
+ "acc,none": 0.866,
200
+ "acc_stderr,none": 0.010777762298369686,
201
+ "alias": " - blimp_only_npi_licensor_present"
202
+ },
203
+ "blimp_only_npi_scope": {
204
+ "acc,none": 0.817,
205
+ "acc_stderr,none": 0.012233587399477823,
206
+ "alias": " - blimp_only_npi_scope"
207
+ },
208
+ "blimp_passive_1": {
209
+ "acc,none": 0.895,
210
+ "acc_stderr,none": 0.009698921026024966,
211
+ "alias": " - blimp_passive_1"
212
+ },
213
+ "blimp_passive_2": {
214
+ "acc,none": 0.905,
215
+ "acc_stderr,none": 0.009276910103103319,
216
+ "alias": " - blimp_passive_2"
217
+ },
218
+ "blimp_principle_A_c_command": {
219
+ "acc,none": 0.763,
220
+ "acc_stderr,none": 0.013454070462577943,
221
+ "alias": " - blimp_principle_A_c_command"
222
+ },
223
+ "blimp_principle_A_case_1": {
224
+ "acc,none": 1.0,
225
+ "acc_stderr,none": 0.0,
226
+ "alias": " - blimp_principle_A_case_1"
227
+ },
228
+ "blimp_principle_A_case_2": {
229
+ "acc,none": 0.975,
230
+ "acc_stderr,none": 0.004939574819698464,
231
+ "alias": " - blimp_principle_A_case_2"
232
+ },
233
+ "blimp_principle_A_domain_1": {
234
+ "acc,none": 0.997,
235
+ "acc_stderr,none": 0.0017303161543469417,
236
+ "alias": " - blimp_principle_A_domain_1"
237
+ },
238
+ "blimp_principle_A_domain_2": {
239
+ "acc,none": 0.914,
240
+ "acc_stderr,none": 0.008870325962594766,
241
+ "alias": " - blimp_principle_A_domain_2"
242
+ },
243
+ "blimp_principle_A_domain_3": {
244
+ "acc,none": 0.852,
245
+ "acc_stderr,none": 0.01123486636423525,
246
+ "alias": " - blimp_principle_A_domain_3"
247
+ },
248
+ "blimp_principle_A_reconstruction": {
249
+ "acc,none": 0.468,
250
+ "acc_stderr,none": 0.01578686875935901,
251
+ "alias": " - blimp_principle_A_reconstruction"
252
+ },
253
+ "blimp_regular_plural_subject_verb_agreement_1": {
254
+ "acc,none": 0.968,
255
+ "acc_stderr,none": 0.005568393575081361,
256
+ "alias": " - blimp_regular_plural_subject_verb_agreement_1"
257
+ },
258
+ "blimp_regular_plural_subject_verb_agreement_2": {
259
+ "acc,none": 0.931,
260
+ "acc_stderr,none": 0.008018934050315162,
261
+ "alias": " - blimp_regular_plural_subject_verb_agreement_2"
262
+ },
263
+ "blimp_sentential_negation_npi_licensor_present": {
264
+ "acc,none": 0.973,
265
+ "acc_stderr,none": 0.005128089049275288,
266
+ "alias": " - blimp_sentential_negation_npi_licensor_present"
267
+ },
268
+ "blimp_sentential_negation_npi_scope": {
269
+ "acc,none": 0.792,
270
+ "acc_stderr,none": 0.01284137457209692,
271
+ "alias": " - blimp_sentential_negation_npi_scope"
272
+ },
273
+ "blimp_sentential_subject_island": {
274
+ "acc,none": 0.483,
275
+ "acc_stderr,none": 0.01581015372983343,
276
+ "alias": " - blimp_sentential_subject_island"
277
+ },
278
+ "blimp_superlative_quantifiers_1": {
279
+ "acc,none": 0.862,
280
+ "acc_stderr,none": 0.010912152632504417,
281
+ "alias": " - blimp_superlative_quantifiers_1"
282
+ },
283
+ "blimp_superlative_quantifiers_2": {
284
+ "acc,none": 0.923,
285
+ "acc_stderr,none": 0.00843458014024062,
286
+ "alias": " - blimp_superlative_quantifiers_2"
287
+ },
288
+ "blimp_tough_vs_raising_1": {
289
+ "acc,none": 0.691,
290
+ "acc_stderr,none": 0.014619600977206493,
291
+ "alias": " - blimp_tough_vs_raising_1"
292
+ },
293
+ "blimp_tough_vs_raising_2": {
294
+ "acc,none": 0.885,
295
+ "acc_stderr,none": 0.010093407594904605,
296
+ "alias": " - blimp_tough_vs_raising_2"
297
+ },
298
+ "blimp_transitive": {
299
+ "acc,none": 0.898,
300
+ "acc_stderr,none": 0.009575368801653876,
301
+ "alias": " - blimp_transitive"
302
+ },
303
+ "blimp_wh_island": {
304
+ "acc,none": 0.766,
305
+ "acc_stderr,none": 0.013394902889660009,
306
+ "alias": " - blimp_wh_island"
307
+ },
308
+ "blimp_wh_questions_object_gap": {
309
+ "acc,none": 0.841,
310
+ "acc_stderr,none": 0.011569479368271306,
311
+ "alias": " - blimp_wh_questions_object_gap"
312
+ },
313
+ "blimp_wh_questions_subject_gap": {
314
+ "acc,none": 0.956,
315
+ "acc_stderr,none": 0.00648892179842742,
316
+ "alias": " - blimp_wh_questions_subject_gap"
317
+ },
318
+ "blimp_wh_questions_subject_gap_long_distance": {
319
+ "acc,none": 0.936,
320
+ "acc_stderr,none": 0.007743640226919304,
321
+ "alias": " - blimp_wh_questions_subject_gap_long_distance"
322
+ },
323
+ "blimp_wh_vs_that_no_gap": {
324
+ "acc,none": 0.975,
325
+ "acc_stderr,none": 0.004939574819698465,
326
+ "alias": " - blimp_wh_vs_that_no_gap"
327
+ },
328
+ "blimp_wh_vs_that_no_gap_long_distance": {
329
+ "acc,none": 0.97,
330
+ "acc_stderr,none": 0.005397140829099214,
331
+ "alias": " - blimp_wh_vs_that_no_gap_long_distance"
332
+ },
333
+ "blimp_wh_vs_that_with_gap": {
334
+ "acc,none": 0.438,
335
+ "acc_stderr,none": 0.01569721001969469,
336
+ "alias": " - blimp_wh_vs_that_with_gap"
337
+ },
338
+ "blimp_wh_vs_that_with_gap_long_distance": {
339
+ "acc,none": 0.341,
340
+ "acc_stderr,none": 0.014998131348402709,
341
+ "alias": " - blimp_wh_vs_that_with_gap_long_distance"
342
+ }
343
+ },
344
+ "groups": {
345
+ "blimp": {
346
+ "acc,none": 0.8394328358208956,
347
+ "acc_stderr,none": 0.13653720128092459,
348
+ "alias": "blimp"
349
+ }
350
+ },
351
+ "configs": {
352
+ "blimp_adjunct_island": {
353
+ "task": "blimp_adjunct_island",
354
+ "group": "blimp",
355
+ "dataset_path": "blimp",
356
+ "dataset_name": "adjunct_island",
357
+ "validation_split": "train",
358
+ "doc_to_text": "",
359
+ "doc_to_target": 0,
360
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
361
+ "description": "",
362
+ "target_delimiter": " ",
363
+ "fewshot_delimiter": "\n\n",
364
+ "num_fewshot": 0,
365
+ "metric_list": [
366
+ {
367
+ "metric": "acc"
368
+ }
369
+ ],
370
+ "output_type": "multiple_choice",
371
+ "repeats": 1,
372
+ "should_decontaminate": true,
373
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
374
+ "metadata": {
375
+ "version": 1.0
376
+ }
377
+ },
378
+ "blimp_anaphor_gender_agreement": {
379
+ "task": "blimp_anaphor_gender_agreement",
380
+ "group": "blimp",
381
+ "dataset_path": "blimp",
382
+ "dataset_name": "anaphor_gender_agreement",
383
+ "validation_split": "train",
384
+ "doc_to_text": "",
385
+ "doc_to_target": 0,
386
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
387
+ "description": "",
388
+ "target_delimiter": " ",
389
+ "fewshot_delimiter": "\n\n",
390
+ "num_fewshot": 0,
391
+ "metric_list": [
392
+ {
393
+ "metric": "acc"
394
+ }
395
+ ],
396
+ "output_type": "multiple_choice",
397
+ "repeats": 1,
398
+ "should_decontaminate": true,
399
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
400
+ "metadata": {
401
+ "version": 1.0
402
+ }
403
+ },
404
+ "blimp_anaphor_number_agreement": {
405
+ "task": "blimp_anaphor_number_agreement",
406
+ "group": "blimp",
407
+ "dataset_path": "blimp",
408
+ "dataset_name": "anaphor_number_agreement",
409
+ "validation_split": "train",
410
+ "doc_to_text": "",
411
+ "doc_to_target": 0,
412
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
413
+ "description": "",
414
+ "target_delimiter": " ",
415
+ "fewshot_delimiter": "\n\n",
416
+ "num_fewshot": 0,
417
+ "metric_list": [
418
+ {
419
+ "metric": "acc"
420
+ }
421
+ ],
422
+ "output_type": "multiple_choice",
423
+ "repeats": 1,
424
+ "should_decontaminate": true,
425
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
426
+ "metadata": {
427
+ "version": 1.0
428
+ }
429
+ },
430
+ "blimp_animate_subject_passive": {
431
+ "task": "blimp_animate_subject_passive",
432
+ "group": "blimp",
433
+ "dataset_path": "blimp",
434
+ "dataset_name": "animate_subject_passive",
435
+ "validation_split": "train",
436
+ "doc_to_text": "",
437
+ "doc_to_target": 0,
438
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
439
+ "description": "",
440
+ "target_delimiter": " ",
441
+ "fewshot_delimiter": "\n\n",
442
+ "num_fewshot": 0,
443
+ "metric_list": [
444
+ {
445
+ "metric": "acc"
446
+ }
447
+ ],
448
+ "output_type": "multiple_choice",
449
+ "repeats": 1,
450
+ "should_decontaminate": true,
451
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
452
+ "metadata": {
453
+ "version": 1.0
454
+ }
455
+ },
456
+ "blimp_animate_subject_trans": {
457
+ "task": "blimp_animate_subject_trans",
458
+ "group": "blimp",
459
+ "dataset_path": "blimp",
460
+ "dataset_name": "animate_subject_trans",
461
+ "validation_split": "train",
462
+ "doc_to_text": "",
463
+ "doc_to_target": 0,
464
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
465
+ "description": "",
466
+ "target_delimiter": " ",
467
+ "fewshot_delimiter": "\n\n",
468
+ "num_fewshot": 0,
469
+ "metric_list": [
470
+ {
471
+ "metric": "acc"
472
+ }
473
+ ],
474
+ "output_type": "multiple_choice",
475
+ "repeats": 1,
476
+ "should_decontaminate": true,
477
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
478
+ "metadata": {
479
+ "version": 1.0
480
+ }
481
+ },
482
+ "blimp_causative": {
483
+ "task": "blimp_causative",
484
+ "group": "blimp",
485
+ "dataset_path": "blimp",
486
+ "dataset_name": "causative",
487
+ "validation_split": "train",
488
+ "doc_to_text": "",
489
+ "doc_to_target": 0,
490
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
491
+ "description": "",
492
+ "target_delimiter": " ",
493
+ "fewshot_delimiter": "\n\n",
494
+ "num_fewshot": 0,
495
+ "metric_list": [
496
+ {
497
+ "metric": "acc"
498
+ }
499
+ ],
500
+ "output_type": "multiple_choice",
501
+ "repeats": 1,
502
+ "should_decontaminate": true,
503
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
504
+ "metadata": {
505
+ "version": 1.0
506
+ }
507
+ },
508
+ "blimp_complex_NP_island": {
509
+ "task": "blimp_complex_NP_island",
510
+ "group": "blimp",
511
+ "dataset_path": "blimp",
512
+ "dataset_name": "complex_NP_island",
513
+ "validation_split": "train",
514
+ "doc_to_text": "",
515
+ "doc_to_target": 0,
516
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
517
+ "description": "",
518
+ "target_delimiter": " ",
519
+ "fewshot_delimiter": "\n\n",
520
+ "num_fewshot": 0,
521
+ "metric_list": [
522
+ {
523
+ "metric": "acc"
524
+ }
525
+ ],
526
+ "output_type": "multiple_choice",
527
+ "repeats": 1,
528
+ "should_decontaminate": true,
529
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
530
+ "metadata": {
531
+ "version": 1.0
532
+ }
533
+ },
534
+ "blimp_coordinate_structure_constraint_complex_left_branch": {
535
+ "task": "blimp_coordinate_structure_constraint_complex_left_branch",
536
+ "group": "blimp",
537
+ "dataset_path": "blimp",
538
+ "dataset_name": "coordinate_structure_constraint_complex_left_branch",
539
+ "validation_split": "train",
540
+ "doc_to_text": "",
541
+ "doc_to_target": 0,
542
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
543
+ "description": "",
544
+ "target_delimiter": " ",
545
+ "fewshot_delimiter": "\n\n",
546
+ "num_fewshot": 0,
547
+ "metric_list": [
548
+ {
549
+ "metric": "acc"
550
+ }
551
+ ],
552
+ "output_type": "multiple_choice",
553
+ "repeats": 1,
554
+ "should_decontaminate": true,
555
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
556
+ "metadata": {
557
+ "version": 1.0
558
+ }
559
+ },
560
+ "blimp_coordinate_structure_constraint_object_extraction": {
561
+ "task": "blimp_coordinate_structure_constraint_object_extraction",
562
+ "group": "blimp",
563
+ "dataset_path": "blimp",
564
+ "dataset_name": "coordinate_structure_constraint_object_extraction",
565
+ "validation_split": "train",
566
+ "doc_to_text": "",
567
+ "doc_to_target": 0,
568
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
569
+ "description": "",
570
+ "target_delimiter": " ",
571
+ "fewshot_delimiter": "\n\n",
572
+ "num_fewshot": 0,
573
+ "metric_list": [
574
+ {
575
+ "metric": "acc"
576
+ }
577
+ ],
578
+ "output_type": "multiple_choice",
579
+ "repeats": 1,
580
+ "should_decontaminate": true,
581
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
582
+ "metadata": {
583
+ "version": 1.0
584
+ }
585
+ },
586
+ "blimp_determiner_noun_agreement_1": {
587
+ "task": "blimp_determiner_noun_agreement_1",
588
+ "group": "blimp",
589
+ "dataset_path": "blimp",
590
+ "dataset_name": "determiner_noun_agreement_1",
591
+ "validation_split": "train",
592
+ "doc_to_text": "",
593
+ "doc_to_target": 0,
594
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
595
+ "description": "",
596
+ "target_delimiter": " ",
597
+ "fewshot_delimiter": "\n\n",
598
+ "num_fewshot": 0,
599
+ "metric_list": [
600
+ {
601
+ "metric": "acc"
602
+ }
603
+ ],
604
+ "output_type": "multiple_choice",
605
+ "repeats": 1,
606
+ "should_decontaminate": true,
607
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
608
+ "metadata": {
609
+ "version": 1.0
610
+ }
611
+ },
612
+ "blimp_determiner_noun_agreement_2": {
613
+ "task": "blimp_determiner_noun_agreement_2",
614
+ "group": "blimp",
615
+ "dataset_path": "blimp",
616
+ "dataset_name": "determiner_noun_agreement_2",
617
+ "validation_split": "train",
618
+ "doc_to_text": "",
619
+ "doc_to_target": 0,
620
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
621
+ "description": "",
622
+ "target_delimiter": " ",
623
+ "fewshot_delimiter": "\n\n",
624
+ "num_fewshot": 0,
625
+ "metric_list": [
626
+ {
627
+ "metric": "acc"
628
+ }
629
+ ],
630
+ "output_type": "multiple_choice",
631
+ "repeats": 1,
632
+ "should_decontaminate": true,
633
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
634
+ "metadata": {
635
+ "version": 1.0
636
+ }
637
+ },
638
+ "blimp_determiner_noun_agreement_irregular_1": {
639
+ "task": "blimp_determiner_noun_agreement_irregular_1",
640
+ "group": "blimp",
641
+ "dataset_path": "blimp",
642
+ "dataset_name": "determiner_noun_agreement_irregular_1",
643
+ "validation_split": "train",
644
+ "doc_to_text": "",
645
+ "doc_to_target": 0,
646
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
647
+ "description": "",
648
+ "target_delimiter": " ",
649
+ "fewshot_delimiter": "\n\n",
650
+ "num_fewshot": 0,
651
+ "metric_list": [
652
+ {
653
+ "metric": "acc"
654
+ }
655
+ ],
656
+ "output_type": "multiple_choice",
657
+ "repeats": 1,
658
+ "should_decontaminate": true,
659
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
660
+ "metadata": {
661
+ "version": 1.0
662
+ }
663
+ },
664
+ "blimp_determiner_noun_agreement_irregular_2": {
665
+ "task": "blimp_determiner_noun_agreement_irregular_2",
666
+ "group": "blimp",
667
+ "dataset_path": "blimp",
668
+ "dataset_name": "determiner_noun_agreement_irregular_2",
669
+ "validation_split": "train",
670
+ "doc_to_text": "",
671
+ "doc_to_target": 0,
672
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
673
+ "description": "",
674
+ "target_delimiter": " ",
675
+ "fewshot_delimiter": "\n\n",
676
+ "num_fewshot": 0,
677
+ "metric_list": [
678
+ {
679
+ "metric": "acc"
680
+ }
681
+ ],
682
+ "output_type": "multiple_choice",
683
+ "repeats": 1,
684
+ "should_decontaminate": true,
685
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
686
+ "metadata": {
687
+ "version": 1.0
688
+ }
689
+ },
690
+ "blimp_determiner_noun_agreement_with_adj_2": {
691
+ "task": "blimp_determiner_noun_agreement_with_adj_2",
692
+ "group": "blimp",
693
+ "dataset_path": "blimp",
694
+ "dataset_name": "determiner_noun_agreement_with_adj_2",
695
+ "validation_split": "train",
696
+ "doc_to_text": "",
697
+ "doc_to_target": 0,
698
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
699
+ "description": "",
700
+ "target_delimiter": " ",
701
+ "fewshot_delimiter": "\n\n",
702
+ "num_fewshot": 0,
703
+ "metric_list": [
704
+ {
705
+ "metric": "acc"
706
+ }
707
+ ],
708
+ "output_type": "multiple_choice",
709
+ "repeats": 1,
710
+ "should_decontaminate": true,
711
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
712
+ "metadata": {
713
+ "version": 1.0
714
+ }
715
+ },
716
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": {
717
+ "task": "blimp_determiner_noun_agreement_with_adj_irregular_1",
718
+ "group": "blimp",
719
+ "dataset_path": "blimp",
720
+ "dataset_name": "determiner_noun_agreement_with_adj_irregular_1",
721
+ "validation_split": "train",
722
+ "doc_to_text": "",
723
+ "doc_to_target": 0,
724
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
725
+ "description": "",
726
+ "target_delimiter": " ",
727
+ "fewshot_delimiter": "\n\n",
728
+ "num_fewshot": 0,
729
+ "metric_list": [
730
+ {
731
+ "metric": "acc"
732
+ }
733
+ ],
734
+ "output_type": "multiple_choice",
735
+ "repeats": 1,
736
+ "should_decontaminate": true,
737
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
738
+ "metadata": {
739
+ "version": 1.0
740
+ }
741
+ },
742
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": {
743
+ "task": "blimp_determiner_noun_agreement_with_adj_irregular_2",
744
+ "group": "blimp",
745
+ "dataset_path": "blimp",
746
+ "dataset_name": "determiner_noun_agreement_with_adj_irregular_2",
747
+ "validation_split": "train",
748
+ "doc_to_text": "",
749
+ "doc_to_target": 0,
750
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
751
+ "description": "",
752
+ "target_delimiter": " ",
753
+ "fewshot_delimiter": "\n\n",
754
+ "num_fewshot": 0,
755
+ "metric_list": [
756
+ {
757
+ "metric": "acc"
758
+ }
759
+ ],
760
+ "output_type": "multiple_choice",
761
+ "repeats": 1,
762
+ "should_decontaminate": true,
763
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
764
+ "metadata": {
765
+ "version": 1.0
766
+ }
767
+ },
768
+ "blimp_determiner_noun_agreement_with_adjective_1": {
769
+ "task": "blimp_determiner_noun_agreement_with_adjective_1",
770
+ "group": "blimp",
771
+ "dataset_path": "blimp",
772
+ "dataset_name": "determiner_noun_agreement_with_adjective_1",
773
+ "validation_split": "train",
774
+ "doc_to_text": "",
775
+ "doc_to_target": 0,
776
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
777
+ "description": "",
778
+ "target_delimiter": " ",
779
+ "fewshot_delimiter": "\n\n",
780
+ "num_fewshot": 0,
781
+ "metric_list": [
782
+ {
783
+ "metric": "acc"
784
+ }
785
+ ],
786
+ "output_type": "multiple_choice",
787
+ "repeats": 1,
788
+ "should_decontaminate": true,
789
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
790
+ "metadata": {
791
+ "version": 1.0
792
+ }
793
+ },
794
+ "blimp_distractor_agreement_relational_noun": {
795
+ "task": "blimp_distractor_agreement_relational_noun",
796
+ "group": "blimp",
797
+ "dataset_path": "blimp",
798
+ "dataset_name": "distractor_agreement_relational_noun",
799
+ "validation_split": "train",
800
+ "doc_to_text": "",
801
+ "doc_to_target": 0,
802
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
803
+ "description": "",
804
+ "target_delimiter": " ",
805
+ "fewshot_delimiter": "\n\n",
806
+ "num_fewshot": 0,
807
+ "metric_list": [
808
+ {
809
+ "metric": "acc"
810
+ }
811
+ ],
812
+ "output_type": "multiple_choice",
813
+ "repeats": 1,
814
+ "should_decontaminate": true,
815
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
816
+ "metadata": {
817
+ "version": 1.0
818
+ }
819
+ },
820
+ "blimp_distractor_agreement_relative_clause": {
821
+ "task": "blimp_distractor_agreement_relative_clause",
822
+ "group": "blimp",
823
+ "dataset_path": "blimp",
824
+ "dataset_name": "distractor_agreement_relative_clause",
825
+ "validation_split": "train",
826
+ "doc_to_text": "",
827
+ "doc_to_target": 0,
828
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
829
+ "description": "",
830
+ "target_delimiter": " ",
831
+ "fewshot_delimiter": "\n\n",
832
+ "num_fewshot": 0,
833
+ "metric_list": [
834
+ {
835
+ "metric": "acc"
836
+ }
837
+ ],
838
+ "output_type": "multiple_choice",
839
+ "repeats": 1,
840
+ "should_decontaminate": true,
841
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
842
+ "metadata": {
843
+ "version": 1.0
844
+ }
845
+ },
846
+ "blimp_drop_argument": {
847
+ "task": "blimp_drop_argument",
848
+ "group": "blimp",
849
+ "dataset_path": "blimp",
850
+ "dataset_name": "drop_argument",
851
+ "validation_split": "train",
852
+ "doc_to_text": "",
853
+ "doc_to_target": 0,
854
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
855
+ "description": "",
856
+ "target_delimiter": " ",
857
+ "fewshot_delimiter": "\n\n",
858
+ "num_fewshot": 0,
859
+ "metric_list": [
860
+ {
861
+ "metric": "acc"
862
+ }
863
+ ],
864
+ "output_type": "multiple_choice",
865
+ "repeats": 1,
866
+ "should_decontaminate": true,
867
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
868
+ "metadata": {
869
+ "version": 1.0
870
+ }
871
+ },
872
+ "blimp_ellipsis_n_bar_1": {
873
+ "task": "blimp_ellipsis_n_bar_1",
874
+ "group": "blimp",
875
+ "dataset_path": "blimp",
876
+ "dataset_name": "ellipsis_n_bar_1",
877
+ "validation_split": "train",
878
+ "doc_to_text": "",
879
+ "doc_to_target": 0,
880
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
881
+ "description": "",
882
+ "target_delimiter": " ",
883
+ "fewshot_delimiter": "\n\n",
884
+ "num_fewshot": 0,
885
+ "metric_list": [
886
+ {
887
+ "metric": "acc"
888
+ }
889
+ ],
890
+ "output_type": "multiple_choice",
891
+ "repeats": 1,
892
+ "should_decontaminate": true,
893
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
894
+ "metadata": {
895
+ "version": 1.0
896
+ }
897
+ },
898
+ "blimp_ellipsis_n_bar_2": {
899
+ "task": "blimp_ellipsis_n_bar_2",
900
+ "group": "blimp",
901
+ "dataset_path": "blimp",
902
+ "dataset_name": "ellipsis_n_bar_2",
903
+ "validation_split": "train",
904
+ "doc_to_text": "",
905
+ "doc_to_target": 0,
906
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
907
+ "description": "",
908
+ "target_delimiter": " ",
909
+ "fewshot_delimiter": "\n\n",
910
+ "num_fewshot": 0,
911
+ "metric_list": [
912
+ {
913
+ "metric": "acc"
914
+ }
915
+ ],
916
+ "output_type": "multiple_choice",
917
+ "repeats": 1,
918
+ "should_decontaminate": true,
919
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
920
+ "metadata": {
921
+ "version": 1.0
922
+ }
923
+ },
924
+ "blimp_existential_there_object_raising": {
925
+ "task": "blimp_existential_there_object_raising",
926
+ "group": "blimp",
927
+ "dataset_path": "blimp",
928
+ "dataset_name": "existential_there_object_raising",
929
+ "validation_split": "train",
930
+ "doc_to_text": "",
931
+ "doc_to_target": 0,
932
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
933
+ "description": "",
934
+ "target_delimiter": " ",
935
+ "fewshot_delimiter": "\n\n",
936
+ "num_fewshot": 0,
937
+ "metric_list": [
938
+ {
939
+ "metric": "acc"
940
+ }
941
+ ],
942
+ "output_type": "multiple_choice",
943
+ "repeats": 1,
944
+ "should_decontaminate": true,
945
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
946
+ "metadata": {
947
+ "version": 1.0
948
+ }
949
+ },
950
+ "blimp_existential_there_quantifiers_1": {
951
+ "task": "blimp_existential_there_quantifiers_1",
952
+ "group": "blimp",
953
+ "dataset_path": "blimp",
954
+ "dataset_name": "existential_there_quantifiers_1",
955
+ "validation_split": "train",
956
+ "doc_to_text": "",
957
+ "doc_to_target": 0,
958
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
959
+ "description": "",
960
+ "target_delimiter": " ",
961
+ "fewshot_delimiter": "\n\n",
962
+ "num_fewshot": 0,
963
+ "metric_list": [
964
+ {
965
+ "metric": "acc"
966
+ }
967
+ ],
968
+ "output_type": "multiple_choice",
969
+ "repeats": 1,
970
+ "should_decontaminate": true,
971
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
972
+ "metadata": {
973
+ "version": 1.0
974
+ }
975
+ },
976
+ "blimp_existential_there_quantifiers_2": {
977
+ "task": "blimp_existential_there_quantifiers_2",
978
+ "group": "blimp",
979
+ "dataset_path": "blimp",
980
+ "dataset_name": "existential_there_quantifiers_2",
981
+ "validation_split": "train",
982
+ "doc_to_text": "",
983
+ "doc_to_target": 0,
984
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
985
+ "description": "",
986
+ "target_delimiter": " ",
987
+ "fewshot_delimiter": "\n\n",
988
+ "num_fewshot": 0,
989
+ "metric_list": [
990
+ {
991
+ "metric": "acc"
992
+ }
993
+ ],
994
+ "output_type": "multiple_choice",
995
+ "repeats": 1,
996
+ "should_decontaminate": true,
997
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
998
+ "metadata": {
999
+ "version": 1.0
1000
+ }
1001
+ },
1002
+ "blimp_existential_there_subject_raising": {
1003
+ "task": "blimp_existential_there_subject_raising",
1004
+ "group": "blimp",
1005
+ "dataset_path": "blimp",
1006
+ "dataset_name": "existential_there_subject_raising",
1007
+ "validation_split": "train",
1008
+ "doc_to_text": "",
1009
+ "doc_to_target": 0,
1010
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1011
+ "description": "",
1012
+ "target_delimiter": " ",
1013
+ "fewshot_delimiter": "\n\n",
1014
+ "num_fewshot": 0,
1015
+ "metric_list": [
1016
+ {
1017
+ "metric": "acc"
1018
+ }
1019
+ ],
1020
+ "output_type": "multiple_choice",
1021
+ "repeats": 1,
1022
+ "should_decontaminate": true,
1023
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1024
+ "metadata": {
1025
+ "version": 1.0
1026
+ }
1027
+ },
1028
+ "blimp_expletive_it_object_raising": {
1029
+ "task": "blimp_expletive_it_object_raising",
1030
+ "group": "blimp",
1031
+ "dataset_path": "blimp",
1032
+ "dataset_name": "expletive_it_object_raising",
1033
+ "validation_split": "train",
1034
+ "doc_to_text": "",
1035
+ "doc_to_target": 0,
1036
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1037
+ "description": "",
1038
+ "target_delimiter": " ",
1039
+ "fewshot_delimiter": "\n\n",
1040
+ "num_fewshot": 0,
1041
+ "metric_list": [
1042
+ {
1043
+ "metric": "acc"
1044
+ }
1045
+ ],
1046
+ "output_type": "multiple_choice",
1047
+ "repeats": 1,
1048
+ "should_decontaminate": true,
1049
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1050
+ "metadata": {
1051
+ "version": 1.0
1052
+ }
1053
+ },
1054
+ "blimp_inchoative": {
1055
+ "task": "blimp_inchoative",
1056
+ "group": "blimp",
1057
+ "dataset_path": "blimp",
1058
+ "dataset_name": "inchoative",
1059
+ "validation_split": "train",
1060
+ "doc_to_text": "",
1061
+ "doc_to_target": 0,
1062
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1063
+ "description": "",
1064
+ "target_delimiter": " ",
1065
+ "fewshot_delimiter": "\n\n",
1066
+ "num_fewshot": 0,
1067
+ "metric_list": [
1068
+ {
1069
+ "metric": "acc"
1070
+ }
1071
+ ],
1072
+ "output_type": "multiple_choice",
1073
+ "repeats": 1,
1074
+ "should_decontaminate": true,
1075
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1076
+ "metadata": {
1077
+ "version": 1.0
1078
+ }
1079
+ },
1080
+ "blimp_intransitive": {
1081
+ "task": "blimp_intransitive",
1082
+ "group": "blimp",
1083
+ "dataset_path": "blimp",
1084
+ "dataset_name": "intransitive",
1085
+ "validation_split": "train",
1086
+ "doc_to_text": "",
1087
+ "doc_to_target": 0,
1088
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1089
+ "description": "",
1090
+ "target_delimiter": " ",
1091
+ "fewshot_delimiter": "\n\n",
1092
+ "num_fewshot": 0,
1093
+ "metric_list": [
1094
+ {
1095
+ "metric": "acc"
1096
+ }
1097
+ ],
1098
+ "output_type": "multiple_choice",
1099
+ "repeats": 1,
1100
+ "should_decontaminate": true,
1101
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1102
+ "metadata": {
1103
+ "version": 1.0
1104
+ }
1105
+ },
1106
+ "blimp_irregular_past_participle_adjectives": {
1107
+ "task": "blimp_irregular_past_participle_adjectives",
1108
+ "group": "blimp",
1109
+ "dataset_path": "blimp",
1110
+ "dataset_name": "irregular_past_participle_adjectives",
1111
+ "validation_split": "train",
1112
+ "doc_to_text": "",
1113
+ "doc_to_target": 0,
1114
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1115
+ "description": "",
1116
+ "target_delimiter": " ",
1117
+ "fewshot_delimiter": "\n\n",
1118
+ "num_fewshot": 0,
1119
+ "metric_list": [
1120
+ {
1121
+ "metric": "acc"
1122
+ }
1123
+ ],
1124
+ "output_type": "multiple_choice",
1125
+ "repeats": 1,
1126
+ "should_decontaminate": true,
1127
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1128
+ "metadata": {
1129
+ "version": 1.0
1130
+ }
1131
+ },
1132
+ "blimp_irregular_past_participle_verbs": {
1133
+ "task": "blimp_irregular_past_participle_verbs",
1134
+ "group": "blimp",
1135
+ "dataset_path": "blimp",
1136
+ "dataset_name": "irregular_past_participle_verbs",
1137
+ "validation_split": "train",
1138
+ "doc_to_text": "",
1139
+ "doc_to_target": 0,
1140
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1141
+ "description": "",
1142
+ "target_delimiter": " ",
1143
+ "fewshot_delimiter": "\n\n",
1144
+ "num_fewshot": 0,
1145
+ "metric_list": [
1146
+ {
1147
+ "metric": "acc"
1148
+ }
1149
+ ],
1150
+ "output_type": "multiple_choice",
1151
+ "repeats": 1,
1152
+ "should_decontaminate": true,
1153
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1154
+ "metadata": {
1155
+ "version": 1.0
1156
+ }
1157
+ },
1158
+ "blimp_irregular_plural_subject_verb_agreement_1": {
1159
+ "task": "blimp_irregular_plural_subject_verb_agreement_1",
1160
+ "group": "blimp",
1161
+ "dataset_path": "blimp",
1162
+ "dataset_name": "irregular_plural_subject_verb_agreement_1",
1163
+ "validation_split": "train",
1164
+ "doc_to_text": "",
1165
+ "doc_to_target": 0,
1166
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1167
+ "description": "",
1168
+ "target_delimiter": " ",
1169
+ "fewshot_delimiter": "\n\n",
1170
+ "num_fewshot": 0,
1171
+ "metric_list": [
1172
+ {
1173
+ "metric": "acc"
1174
+ }
1175
+ ],
1176
+ "output_type": "multiple_choice",
1177
+ "repeats": 1,
1178
+ "should_decontaminate": true,
1179
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1180
+ "metadata": {
1181
+ "version": 1.0
1182
+ }
1183
+ },
1184
+ "blimp_irregular_plural_subject_verb_agreement_2": {
1185
+ "task": "blimp_irregular_plural_subject_verb_agreement_2",
1186
+ "group": "blimp",
1187
+ "dataset_path": "blimp",
1188
+ "dataset_name": "irregular_plural_subject_verb_agreement_2",
1189
+ "validation_split": "train",
1190
+ "doc_to_text": "",
1191
+ "doc_to_target": 0,
1192
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1193
+ "description": "",
1194
+ "target_delimiter": " ",
1195
+ "fewshot_delimiter": "\n\n",
1196
+ "num_fewshot": 0,
1197
+ "metric_list": [
1198
+ {
1199
+ "metric": "acc"
1200
+ }
1201
+ ],
1202
+ "output_type": "multiple_choice",
1203
+ "repeats": 1,
1204
+ "should_decontaminate": true,
1205
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1206
+ "metadata": {
1207
+ "version": 1.0
1208
+ }
1209
+ },
1210
+ "blimp_left_branch_island_echo_question": {
1211
+ "task": "blimp_left_branch_island_echo_question",
1212
+ "group": "blimp",
1213
+ "dataset_path": "blimp",
1214
+ "dataset_name": "left_branch_island_echo_question",
1215
+ "validation_split": "train",
1216
+ "doc_to_text": "",
1217
+ "doc_to_target": 0,
1218
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1219
+ "description": "",
1220
+ "target_delimiter": " ",
1221
+ "fewshot_delimiter": "\n\n",
1222
+ "num_fewshot": 0,
1223
+ "metric_list": [
1224
+ {
1225
+ "metric": "acc"
1226
+ }
1227
+ ],
1228
+ "output_type": "multiple_choice",
1229
+ "repeats": 1,
1230
+ "should_decontaminate": true,
1231
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1232
+ "metadata": {
1233
+ "version": 1.0
1234
+ }
1235
+ },
1236
+ "blimp_left_branch_island_simple_question": {
1237
+ "task": "blimp_left_branch_island_simple_question",
1238
+ "group": "blimp",
1239
+ "dataset_path": "blimp",
1240
+ "dataset_name": "left_branch_island_simple_question",
1241
+ "validation_split": "train",
1242
+ "doc_to_text": "",
1243
+ "doc_to_target": 0,
1244
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
1245
+ "description": "",
1246
+ "target_delimiter": " ",
1247
+ "fewshot_delimiter": "\n\n",
1248
+ "num_fewshot": 0,
1249
+ "metric_list": [
1250
+ {
1251
+ "metric": "acc"
1252
+ }
1253
+ ],
1254
+ "output_type": "multiple_choice",
1255
+ "repeats": 1,
1256
+ "should_decontaminate": true,
1257
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
1258
+ "metadata": {
1259
+ "version": 1.0
1260
+ }
1261
+ },
1262
+ "blimp_matrix_question_npi_licensor_present": {
1263
+ "task": "blimp_matrix_question_npi_licensor_present",
1264
+ "group": "blimp",
1265
+ "dataset_path": "blimp",
1266
+ "dataset_name": "matrix_question_npi_licensor_present",
1267
+ "validation_split": "train",
1268
+ "doc_to_text": "",
1269
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_npi_present_1": {
+ "task": "blimp_npi_present_1",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "npi_present_1",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_npi_present_2": {
+ "task": "blimp_npi_present_2",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "npi_present_2",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_only_npi_licensor_present": {
+ "task": "blimp_only_npi_licensor_present",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "only_npi_licensor_present",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_only_npi_scope": {
+ "task": "blimp_only_npi_scope",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "only_npi_scope",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_passive_1": {
+ "task": "blimp_passive_1",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "passive_1",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_passive_2": {
+ "task": "blimp_passive_2",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "passive_2",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_principle_A_c_command": {
+ "task": "blimp_principle_A_c_command",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "principle_A_c_command",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_principle_A_case_1": {
+ "task": "blimp_principle_A_case_1",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "principle_A_case_1",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_principle_A_case_2": {
+ "task": "blimp_principle_A_case_2",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "principle_A_case_2",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_principle_A_domain_1": {
+ "task": "blimp_principle_A_domain_1",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "principle_A_domain_1",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_principle_A_domain_2": {
+ "task": "blimp_principle_A_domain_2",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "principle_A_domain_2",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_principle_A_domain_3": {
+ "task": "blimp_principle_A_domain_3",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "principle_A_domain_3",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_principle_A_reconstruction": {
+ "task": "blimp_principle_A_reconstruction",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "principle_A_reconstruction",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_regular_plural_subject_verb_agreement_1": {
+ "task": "blimp_regular_plural_subject_verb_agreement_1",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "regular_plural_subject_verb_agreement_1",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_regular_plural_subject_verb_agreement_2": {
+ "task": "blimp_regular_plural_subject_verb_agreement_2",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "regular_plural_subject_verb_agreement_2",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_sentential_negation_npi_licensor_present": {
+ "task": "blimp_sentential_negation_npi_licensor_present",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "sentential_negation_npi_licensor_present",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_sentential_negation_npi_scope": {
+ "task": "blimp_sentential_negation_npi_scope",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "sentential_negation_npi_scope",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_sentential_subject_island": {
+ "task": "blimp_sentential_subject_island",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "sentential_subject_island",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_superlative_quantifiers_1": {
+ "task": "blimp_superlative_quantifiers_1",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "superlative_quantifiers_1",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_superlative_quantifiers_2": {
+ "task": "blimp_superlative_quantifiers_2",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "superlative_quantifiers_2",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_tough_vs_raising_1": {
+ "task": "blimp_tough_vs_raising_1",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "tough_vs_raising_1",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_tough_vs_raising_2": {
+ "task": "blimp_tough_vs_raising_2",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "tough_vs_raising_2",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_transitive": {
+ "task": "blimp_transitive",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "transitive",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_wh_island": {
+ "task": "blimp_wh_island",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "wh_island",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_wh_questions_object_gap": {
+ "task": "blimp_wh_questions_object_gap",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "wh_questions_object_gap",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_wh_questions_subject_gap": {
+ "task": "blimp_wh_questions_subject_gap",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "wh_questions_subject_gap",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_wh_questions_subject_gap_long_distance": {
+ "task": "blimp_wh_questions_subject_gap_long_distance",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "wh_questions_subject_gap_long_distance",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_wh_vs_that_no_gap": {
+ "task": "blimp_wh_vs_that_no_gap",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "wh_vs_that_no_gap",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_wh_vs_that_no_gap_long_distance": {
+ "task": "blimp_wh_vs_that_no_gap_long_distance",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "wh_vs_that_no_gap_long_distance",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_wh_vs_that_with_gap": {
+ "task": "blimp_wh_vs_that_with_gap",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "wh_vs_that_with_gap",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ },
+ "blimp_wh_vs_that_with_gap_long_distance": {
+ "task": "blimp_wh_vs_that_with_gap_long_distance",
+ "group": "blimp",
+ "dataset_path": "blimp",
+ "dataset_name": "wh_vs_that_with_gap_long_distance",
+ "validation_split": "train",
+ "doc_to_text": "",
+ "doc_to_target": 0,
+ "doc_to_choice": "{{[sentence_good, sentence_bad]}}",
+ "description": "",
+ "target_delimiter": " ",
+ "fewshot_delimiter": "\n\n",
+ "num_fewshot": 0,
+ "metric_list": [
+ {
+ "metric": "acc"
+ }
+ ],
+ "output_type": "multiple_choice",
+ "repeats": 1,
+ "should_decontaminate": true,
+ "doc_to_decontamination_query": "{{sentence_good}} {{sentence_bad}}",
+ "metadata": {
+ "version": 1.0
+ }
+ }
+ },
+ "versions": {
+ "blimp": "N/A",
+ "blimp_adjunct_island": 1.0,
+ "blimp_anaphor_gender_agreement": 1.0,
+ "blimp_anaphor_number_agreement": 1.0,
+ "blimp_animate_subject_passive": 1.0,
+ "blimp_animate_subject_trans": 1.0,
+ "blimp_causative": 1.0,
+ "blimp_complex_NP_island": 1.0,
+ "blimp_coordinate_structure_constraint_complex_left_branch": 1.0,
+ "blimp_coordinate_structure_constraint_object_extraction": 1.0,
+ "blimp_determiner_noun_agreement_1": 1.0,
+ "blimp_determiner_noun_agreement_2": 1.0,
+ "blimp_determiner_noun_agreement_irregular_1": 1.0,
+ "blimp_determiner_noun_agreement_irregular_2": 1.0,
+ "blimp_determiner_noun_agreement_with_adj_2": 1.0,
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": 1.0,
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": 1.0,
+ "blimp_determiner_noun_agreement_with_adjective_1": 1.0,
+ "blimp_distractor_agreement_relational_noun": 1.0,
+ "blimp_distractor_agreement_relative_clause": 1.0,
+ "blimp_drop_argument": 1.0,
+ "blimp_ellipsis_n_bar_1": 1.0,
+ "blimp_ellipsis_n_bar_2": 1.0,
+ "blimp_existential_there_object_raising": 1.0,
+ "blimp_existential_there_quantifiers_1": 1.0,
+ "blimp_existential_there_quantifiers_2": 1.0,
+ "blimp_existential_there_subject_raising": 1.0,
+ "blimp_expletive_it_object_raising": 1.0,
+ "blimp_inchoative": 1.0,
+ "blimp_intransitive": 1.0,
+ "blimp_irregular_past_participle_adjectives": 1.0,
+ "blimp_irregular_past_participle_verbs": 1.0,
+ "blimp_irregular_plural_subject_verb_agreement_1": 1.0,
+ "blimp_irregular_plural_subject_verb_agreement_2": 1.0,
+ "blimp_left_branch_island_echo_question": 1.0,
+ "blimp_left_branch_island_simple_question": 1.0,
+ "blimp_matrix_question_npi_licensor_present": 1.0,
+ "blimp_npi_present_1": 1.0,
+ "blimp_npi_present_2": 1.0,
+ "blimp_only_npi_licensor_present": 1.0,
+ "blimp_only_npi_scope": 1.0,
+ "blimp_passive_1": 1.0,
+ "blimp_passive_2": 1.0,
+ "blimp_principle_A_c_command": 1.0,
+ "blimp_principle_A_case_1": 1.0,
+ "blimp_principle_A_case_2": 1.0,
+ "blimp_principle_A_domain_1": 1.0,
+ "blimp_principle_A_domain_2": 1.0,
+ "blimp_principle_A_domain_3": 1.0,
+ "blimp_principle_A_reconstruction": 1.0,
+ "blimp_regular_plural_subject_verb_agreement_1": 1.0,
+ "blimp_regular_plural_subject_verb_agreement_2": 1.0,
+ "blimp_sentential_negation_npi_licensor_present": 1.0,
+ "blimp_sentential_negation_npi_scope": 1.0,
+ "blimp_sentential_subject_island": 1.0,
+ "blimp_superlative_quantifiers_1": 1.0,
+ "blimp_superlative_quantifiers_2": 1.0,
+ "blimp_tough_vs_raising_1": 1.0,
+ "blimp_tough_vs_raising_2": 1.0,
+ "blimp_transitive": 1.0,
+ "blimp_wh_island": 1.0,
+ "blimp_wh_questions_object_gap": 1.0,
+ "blimp_wh_questions_subject_gap": 1.0,
+ "blimp_wh_questions_subject_gap_long_distance": 1.0,
+ "blimp_wh_vs_that_no_gap": 1.0,
+ "blimp_wh_vs_that_no_gap_long_distance": 1.0,
+ "blimp_wh_vs_that_with_gap": 1.0,
+ "blimp_wh_vs_that_with_gap_long_distance": 1.0
+ },
+ "n-shot": {
+ "blimp": 0,
+ "blimp_adjunct_island": 0,
+ "blimp_anaphor_gender_agreement": 0,
+ "blimp_anaphor_number_agreement": 0,
+ "blimp_animate_subject_passive": 0,
+ "blimp_animate_subject_trans": 0,
+ "blimp_causative": 0,
+ "blimp_complex_NP_island": 0,
+ "blimp_coordinate_structure_constraint_complex_left_branch": 0,
+ "blimp_coordinate_structure_constraint_object_extraction": 0,
+ "blimp_determiner_noun_agreement_1": 0,
+ "blimp_determiner_noun_agreement_2": 0,
+ "blimp_determiner_noun_agreement_irregular_1": 0,
+ "blimp_determiner_noun_agreement_irregular_2": 0,
+ "blimp_determiner_noun_agreement_with_adj_2": 0,
+ "blimp_determiner_noun_agreement_with_adj_irregular_1": 0,
+ "blimp_determiner_noun_agreement_with_adj_irregular_2": 0,
+ "blimp_determiner_noun_agreement_with_adjective_1": 0,
+ "blimp_distractor_agreement_relational_noun": 0,
+ "blimp_distractor_agreement_relative_clause": 0,
+ "blimp_drop_argument": 0,
+ "blimp_ellipsis_n_bar_1": 0,
+ "blimp_ellipsis_n_bar_2": 0,
+ "blimp_existential_there_object_raising": 0,
+ "blimp_existential_there_quantifiers_1": 0,
+ "blimp_existential_there_quantifiers_2": 0,
+ "blimp_existential_there_subject_raising": 0,
+ "blimp_expletive_it_object_raising": 0,
+ "blimp_inchoative": 0,
+ "blimp_intransitive": 0,
+ "blimp_irregular_past_participle_adjectives": 0,
+ "blimp_irregular_past_participle_verbs": 0,
+ "blimp_irregular_plural_subject_verb_agreement_1": 0,
+ "blimp_irregular_plural_subject_verb_agreement_2": 0,
+ "blimp_left_branch_island_echo_question": 0,
+ "blimp_left_branch_island_simple_question": 0,
+ "blimp_matrix_question_npi_licensor_present": 0,
+ "blimp_npi_present_1": 0,
+ "blimp_npi_present_2": 0,
+ "blimp_only_npi_licensor_present": 0,
+ "blimp_only_npi_scope": 0,
+ "blimp_passive_1": 0,
+ "blimp_passive_2": 0,
+ "blimp_principle_A_c_command": 0,
+ "blimp_principle_A_case_1": 0,
+ "blimp_principle_A_case_2": 0,
+ "blimp_principle_A_domain_1": 0,
+ "blimp_principle_A_domain_2": 0,
+ "blimp_principle_A_domain_3": 0,
+ "blimp_principle_A_reconstruction": 0,
+ "blimp_regular_plural_subject_verb_agreement_1": 0,
+ "blimp_regular_plural_subject_verb_agreement_2": 0,
+ "blimp_sentential_negation_npi_licensor_present": 0,
+ "blimp_sentential_negation_npi_scope": 0,
+ "blimp_sentential_subject_island": 0,
+ "blimp_superlative_quantifiers_1": 0,
+ "blimp_superlative_quantifiers_2": 0,
+ "blimp_tough_vs_raising_1": 0,
+ "blimp_tough_vs_raising_2": 0,
+ "blimp_transitive": 0,
+ "blimp_wh_island": 0,
+ "blimp_wh_questions_object_gap": 0,
+ "blimp_wh_questions_subject_gap": 0,
+ "blimp_wh_questions_subject_gap_long_distance": 0,
+ "blimp_wh_vs_that_no_gap": 0,
+ "blimp_wh_vs_that_no_gap_long_distance": 0,
+ "blimp_wh_vs_that_with_gap": 0,
+ "blimp_wh_vs_that_with_gap_long_distance": 0
+ },
+ "config": {
+ "model": "hf",
+ "model_args": "pretrained=SmerkyG/rwkv-5-world-3b,dtype=bfloat16,trust_remote_code=True",
+ "batch_size": "auto",
+ "batch_sizes": [
+ 64
+ ],
+ "device": null,
+ "use_cache": null,
+ "limit": null,
+ "bootstrap_iters": 100000,
+ "gen_kwargs": null
+ },
+ "git_hash": "1ee41f7"
+ }
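
A minimal Python sketch for consuming these dumps (standard library only; the path names the results.json whose diff ends above, and the top-level "versions", "n-shot" and "config" keys are exactly the ones recorded in it):

import json

# One of the results.json files added in this commit (illustrative choice).
path = ("lm-eval-output/SmerkyG/rwkv-5-world-3b/blimp/"
        "dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/results.json")

with open(path) as f:
    data = json.load(f)

# Every blimp sub-task appears in both the "versions" and the "n-shot" map.
for task, version in sorted(data["versions"].items()):
    print(f"{task}: version={version}, n-shot={data['n-shot'].get(task)}")

# The harness invocation that produced the file is recorded under "config".
cfg = data["config"]
print(cfg["model"], cfg["model_args"], "batch_size:", cfg["batch_size"])
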
lm-eval-output/SmerkyG/rwkv-5-world-3b/blimp/dtype=bfloat16,trust_remote_code=True-num_fewshot=-1-nvidia-gpu/taskrun.log ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:718ff43719a07f9fc3f778b7c693026ca84630e3ea716921ab0cc81e2d9ef78d
+ size 325657
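
The taskrun.log entry above is a Git LFS pointer: the repository records only the log's sha256 oid and byte size, not the log content itself. A minimal verification sketch (standard library only; the local filename taskrun.log is an assumption, e.g. after fetching the real file with git lfs pull):

import hashlib

# Expected values copied from the LFS pointer committed above.
EXPECTED_OID = "718ff43719a07f9fc3f778b7c693026ca84630e3ea716921ab0cc81e2d9ef78d"
EXPECTED_SIZE = 325657

h = hashlib.sha256()
size = 0
with open("taskrun.log", "rb") as f:  # assumed local path to the fetched log
    # Stream in 1 MiB chunks so large logs need not fit in memory.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
        size += len(chunk)

assert size == EXPECTED_SIZE, f"size mismatch: got {size}"
assert h.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("taskrun.log matches its LFS pointer")
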