Modalities: Text
Size: < 1K
ArXiv:
Libraries: Datasets
License:
zhangir-azerbayev committed
Commit 47834db
1 Parent(s): fffb492

added data

Files changed (3):
  1. data.json +0 -0
  2. proofnet.py +76 -0
  3. test_loader.py +11 -0
data.json ADDED
The diff for this file is too large to render. See raw diff
 
proofnet.py ADDED
@@ -0,0 +1,76 @@
+ # coding=utf-8
+ # Copyright 2020 The TensorFlow Datasets Authors and the HuggingFace Datasets Authors.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Lint as: python3
+
+ import json
+
+ import datasets
+
+
+ logger = datasets.logging.get_logger(__name__)
+
+ # Minimal description inferred from the declared features; the original file
+ # referenced _DESCRIPTION without defining it.
+ _DESCRIPTION = """\
+ ProofNet: natural-language mathematical statements and proofs paired with
+ formal statements and the source headers needed to state them.
+ """
+
+
+ class ProofNetConfig(datasets.BuilderConfig):
+     """BuilderConfig for ProofNet."""
+
+     def __init__(self, **kwargs):
+         """BuilderConfig for ProofNet.
+
+         Args:
+             **kwargs: keyword arguments forwarded to super.
+         """
+         super(ProofNetConfig, self).__init__(**kwargs)
+
+
+ class ProofNet(datasets.GeneratorBasedBuilder):
+     """ProofNet dataset builder; all examples live in a single test split."""
+
+     BUILDER_CONFIGS = [
+         ProofNetConfig(
+             name="plain_text",
+             version=datasets.Version("1.0.0", ""),
+             description="Plain text",
+         ),
+     ]
+
+     def _info(self):
+         return datasets.DatasetInfo(
+             description=_DESCRIPTION,
+             features=datasets.Features(
+                 {
+                     "id": datasets.Value("string"),
+                     "nl_statement": datasets.Value("string"),
+                     "nl_proof": datasets.Value("string"),
+                     "formal_statement": datasets.Value("string"),
+                     "src_header": datasets.Value("string"),
+                 }
+             ),
+         )
+
+     def _split_generators(self, dl_manager):
+         # data.json ships with the repository and is fetched directly.
+         return [
+             datasets.SplitGenerator(
+                 name=datasets.Split.TEST,
+                 gen_kwargs={"filepath": dl_manager.download("data.json")},
+             ),
+         ]
+
+     def _generate_examples(self, filepath):
+         """This function returns the examples in the raw (text) form."""
+         logger.info("generating examples from = %s", filepath)
+         key = 0
+         with open(filepath, encoding="utf-8") as f:
+             instances = json.load(f)
+         for instance in instances:
+             yield key, instance
+             key += 1
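For orientation, the loader above expects data.json to be a JSON list whose entries carry the five string fields declared in _info. One hypothetical entry is sketched below; the field values are illustrative placeholders, since the actual contents of data.json are not rendered in this diff.

# A single illustrative record; real entries in data.json are not shown here.
example_instance = {
    "id": "example-id-0",                      # hypothetical identifier
    "nl_statement": "Prove that ...",          # informal (natural-language) statement
    "nl_proof": "Suppose, for contradiction, ...",    # informal proof
    "formal_statement": "theorem example_0 ...",      # formal counterpart of the statement
    "src_header": "import ...",                # header required by the formal statement
}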
test_loader.py ADDED
@@ -0,0 +1,11 @@
+ from datasets import load_dataset
+ from tqdm import tqdm
+
+ data = load_dataset("hoskinson-center/proofnet")
+
+ print(data)
+
+ # Iterate over the examples of the single test split defined by the builder
+ # (iterating over the DatasetDict itself would only yield split names).
+ print("looping through data...")
+ for x in tqdm(data["test"]):
+     pass
+ print("finished!")