zyznull committed on
Commit
e7b285a
1 Parent(s): ad4d0bd

Upload msmarco-passage-corpus.py

Files changed (1)
  1. msmarco-passage-corpus.py +77 -0
msmarco-passage-corpus.py ADDED
@@ -0,0 +1,77 @@
+ # coding=utf-8
+
+ # Lint as: python3
+ """Passage Ranking finetune dataset."""
+
+ import json
+
+ import datasets
+
+ _CITATION = """
+ @misc{bajaj2018ms,
+       title={MS MARCO: A Human Generated MAchine Reading COmprehension Dataset},
+       author={Payal Bajaj and Daniel Campos and Nick Craswell and Li Deng and Jianfeng Gao and Xiaodong Liu
+       and Rangan Majumder and Andrew McNamara and Bhaskar Mitra and Tri Nguyen and Mir Rosenberg and Xia Song
+       and Alina Stoica and Saurabh Tiwary and Tong Wang},
+       year={2018},
+       eprint={1611.09268},
+       archivePrefix={arXiv},
+       primaryClass={cs.CL}
+ }
+ """
+
+ _DESCRIPTION = "MS MARCO Passage Ranking dataset"
+
+ _DATASET_URLS = {
+     'corpus': "https://modelscope.cn/api/v1/datasets/zyznull/MSMARCO-Passage/repo/files?Revision=master&FilePath=collection.tsv.gz",
+     'train_query': "https://modelscope.cn/api/v1/datasets/zyznull/MSMARCO-Passage/repo/files?Revision=master&FilePath=train_queries.jsonl.gz",
+     'dev_query': "https://modelscope.cn/api/v1/datasets/zyznull/MSMARCO-Passage/repo/files?Revision=master&FilePath=dev_queries.jsonl.gz",
+ }
+
+
+ class MsMarcoPassage(datasets.GeneratorBasedBuilder):
+     VERSION = datasets.Version("0.0.1")
+
+     BUILDER_CONFIGS = [
+         datasets.BuilderConfig(version=VERSION,
+                                description="MS MARCO passage train/dev datasets"),
+     ]
+
+     def _info(self):
+         features = datasets.Features({
+             '_id': datasets.Value('string'),
+             'text': datasets.Value('string'),
+         })
+         return datasets.DatasetInfo(
+             # This is the description that will appear on the datasets page.
+             description=_DESCRIPTION,
+             # This defines the different columns of the dataset and their types.
+             features=features,
+             supervised_keys=None,
+             # Homepage of the dataset for documentation.
+             homepage="",
+             # License for the dataset if available.
+             license="",
+             # Citation for the dataset.
+             citation=_CITATION,
+         )
+
+     def _split_generators(self, dl_manager):
+         downloaded_files = dl_manager.download_and_extract(_DATASET_URLS)
+         splits = [
+             datasets.SplitGenerator(
+                 name=split,
+                 gen_kwargs={
+                     # Always pass a list of files so _generate_examples can iterate uniformly.
+                     "files": [downloaded_files[split]] if isinstance(downloaded_files[split], str) else downloaded_files[split],
+                 },
+             ) for split in downloaded_files
+         ]
+         return splits
+
+     def _generate_examples(self, files):
+         """Yields examples."""
+         # The parameter name must match the "files" key passed via gen_kwargs above.
+         idx = 0
+         for filepath in files:
+             with open(filepath, encoding="utf-8") as f:
+                 for line in f:
+                     line = line.rstrip("\n")
+                     if not line:
+                         continue
+                     try:
+                         # Query files are JSON Lines: one JSON object per line.
+                         example = json.loads(line)
+                     except json.JSONDecodeError:
+                         # The corpus file (collection.tsv) is tab-separated: <_id>\t<text>.
+                         _id, text = line.split("\t", 1)
+                         example = {'_id': _id, 'text': text}
+                     yield idx, example
+                     idx += 1
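
For reference, a minimal usage sketch (an assumption, not part of the commit): once the script is saved locally as msmarco-passage-corpus.py, it can be loaded with the datasets library; the available split names are the keys of _DATASET_URLS (corpus, train_query, dev_query).

    from datasets import load_dataset

    # Load the passage corpus split defined by this loading script.
    corpus = load_dataset("msmarco-passage-corpus.py", split="corpus")
    print(corpus[0])  # expected fields per _info(): {'_id': ..., 'text': ...}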