wckwan committed
Commit beb10bf
Parent: 0f95a4b

Upload 2 files

Files changed (2)
  1. M4LE.py +149 -0
  2. data.zip +3 -0
M4LE.py ADDED
@@ -0,0 +1,149 @@
+ # Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ import os
+
+ import datasets
+ import json
+
+
+ _DESCRIPTION = (
+     "M4LE is a systematic and comprehensive long-context benchmark. It aims to"
+     " evaluate LM performances in five long-context understanding abilities,"
+     " across multiple domains, languages and task types."
+ )
+ _HOMEPAGE = "https://github.com/KwanWaiChung/M4LE"
+ _LICENSE = """MIT License
+ Copyright (c) 2023 Wai-Chung Kwan
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE."""
+ URL = r"https://huggingface.co/datasets/wckwan/M4LE/resolve/main/data.zip"
+
+
+ tasks = [
+     "mnds-news_semantic-multiple_cls+cnt",
+     "news-commentary_global_tran-zh2en",
+     "wiki2019zh_semantic-single_nli+ret",
+     "thucnews_semantic-multiple_cls+cnt",
+     "pubmed_global_sum",
+     "marc_explicit-multiple_cls+ret",
+     "bigpatent_global_cls",
+     "wow_explicit-single_ret",
+     "cepsum_explicit-single_sum+ret",
+     "drcd_explicit-single_ret",
+     "arxiv_global_sum",
+     "nq-open_semantic-single_qa",
+     "triviaqa_global_qa",
+     "booksum_global_sum",
+     "online-shopping_explicit-multiple_cls+ret",
+     "c3_explicit-single_qa+ret",
+     "clts_global_sum",
+     "cnnnews_explicit-single_sum+ret",
+     "wikihow_semantic-single_sum",
+     "news-commentary_global_tran-en2zh",
+     "hotpotqa_semantic-multiple_qa",
+     "duorc_semantic-single_qa",
+     "wikitext-103_semantic-single_nli+ret",
+     "lcsts_explicit-single_sum+ret",
+     "newsqa_explicit-single_qa",
+     "dureader_semantic-single_qa",
+     "mnds-news_explicit-multiple_cls+ret",
+     "tedtalks_semantic-single_tran+ret-zh2en",
+     "drcd_semantic-single_qa",
+     "news2016_semantic-single_sum+ret",
+     "mnds-news_explicit-single_cls+ret",
+     "open-subtitles_global_tran-zh2en",
+     "bigpatent_global_sum",
+     "cnewsum_global_sum",
+     "ncls_explicit-single_sum+ret",
+     "open-subtitles_global_tran-en2zh",
+     "thucnews_explicit-single_cls+ret",
+     "thucnews_explicit-multiple_cls+ret",
+     "tedtalks_semantic-single_tran+ret-en2zh",
+ ]
+
+
+ class M4LEConfig(datasets.BuilderConfig):
+     def __init__(self, **kwargs):
+         super().__init__(version=datasets.Version("1.0.0"), **kwargs)
+
+
+ class LongBench(datasets.GeneratorBasedBuilder):
+     BUILDER_CONFIGS = [
+         M4LEConfig(
+             name=task,
+         )
+         for task in tasks
+     ]
+
+     def _info(self):
+         features = datasets.Features(
+             {
+                 "instruction": datasets.Value("string"),
+                 "input": datasets.Value("string"),
+                 "answers": [datasets.Value("string")],
+                 "input_length": datasets.Value("int32"),
+                 "total_length": datasets.Value("int32"),
+                 "length_bucket": datasets.Value("int32"),
+             }
+         )
+         return datasets.DatasetInfo(
+             description=_DESCRIPTION,
+             features=features,
+             homepage=_HOMEPAGE,
+             license=_LICENSE,
+         )
+
+     def _split_generators(self, dl_manager):
+         data_dir = dl_manager.download_and_extract(URL)
+         task_name = self.config.name
+         return [
+             datasets.SplitGenerator(
+                 name=datasets.Split.TEST,
+                 gen_kwargs={
+                     "filepath": os.path.join(
+                         data_dir, "data", f"{task_name}.jsonl"
+                     ),
+                 },
+             )
+         ]
+
+     def _generate_examples(self, filepath):
+         with open(filepath, encoding="utf-8") as f:
+             for idx, line in enumerate(f):
+                 key = f"{self.config.name}-{idx}"
+                 item = json.loads(line)
+                 yield key, {
+                     "instruction": item["instruction"],
+                     "input": item["input"],
+                     "answers": item["answers"],
+                     "input_length": item["input_length"],
+                     "total_length": item["total_length"],
+                     "length_bucket": item["length_bucket"],
+                 }
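
The script registers one builder config per entry in `tasks` and exposes only a test split, so each task can be loaded directly through the `datasets` library. A minimal usage sketch, not part of this commit: the task name is just one entry from the list above, and newer `datasets` releases may require `trust_remote_code=True` to run a script-based dataset.

    from datasets import load_dataset

    # One config per task name; the loading script only defines a "test" split.
    ds = load_dataset(
        "wckwan/M4LE",
        "hotpotqa_semantic-multiple_qa",  # any entry from the `tasks` list
        split="test",
        trust_remote_code=True,  # may be needed on newer `datasets` versions
    )
    print(ds[0]["instruction"], ds[0]["length_bucket"])
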
data.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c69d03adbc2fcc893a4db61d06f2eba65af901d6c9e5bd9a31614b3436931bff
+ size 1090018654
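
data.zip itself is stored with Git LFS, so the commit only adds the pointer file above. A small sketch, under the assumption that the archive has been downloaded locally as `data.zip`, for checking it against the sha256 recorded in the pointer:

    import hashlib

    EXPECTED_SHA256 = "c69d03adbc2fcc893a4db61d06f2eba65af901d6c9e5bd9a31614b3436931bff"

    def sha256_of(path, chunk_size=1 << 20):
        # Stream the ~1 GB archive in chunks to keep memory use low.
        h = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(chunk_size), b""):
                h.update(chunk)
        return h.hexdigest()

    assert sha256_of("data.zip") == EXPECTED_SHA256  # local path is an assumption
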