pierreguillou committed on
Commit
e8f67ee
1 Parent(s): a58a55e

Update DocLayNet-small.py

Files changed (1)
  1. DocLayNet-small.py +14 -21
DocLayNet-small.py CHANGED
@@ -68,13 +68,12 @@ logger = datasets.logging.get_logger(__name__)
 class DocLayNetBuilderConfig(datasets.BuilderConfig):
     """BuilderConfig for DocLayNet small"""
 
-    def __init__(self, name, splits, **kwargs):
+    def __init__(self, name, **kwargs):
         """BuilderConfig for DocLayNet small.
         Args:
             **kwargs: keyword arguments forwarded to super.
         """
         super().__init__(name, **kwargs)
-        self.splits = splits
 
 
 class DocLayNet(datasets.GeneratorBasedBuilder):
@@ -100,7 +99,7 @@ class DocLayNet(datasets.GeneratorBasedBuilder):
     # data = datasets.load_dataset('my_dataset', 'first_domain')
     # data = datasets.load_dataset('my_dataset', 'second_domain')
     BUILDER_CONFIGS = [
-        DocLayNetBuilderConfig(name="DocLayNet_2022.08_processed_on_2023.01", splits=["train", "val", "valid", "validation", "dev", "test"], version=datasets.Version("1.0.0"), description="DocLayNeT small dataset"),
+        DocLayNetBuilderConfig(name="DocLayNet_2022.08_processed_on_2023.01", version=datasets.Version("1.0.0"), description="DocLayNeT small dataset"),
     ]
 
     BUILDER_CONFIG_CLASS = DocLayNetBuilderConfig
@@ -158,42 +157,36 @@ class DocLayNet(datasets.GeneratorBasedBuilder):
         # dl_manager is a datasets.download.DownloadManager that can be used to download and extract URLS
         # It can accept any type or nested list/dict and will give back the same structure with the url replaced with path to local files.
         # By default the archives will be extracted and a path to a cached folder where they are extracted is returned instead of the archive
-
+
         downloaded_file = dl_manager.download_and_extract("https://huggingface.co/datasets/pierreguillou/DocLayNet-small/resolve/main/data/dataset_small.zip")
-        splits = []
-        for split in self.config.splits:
-            if split == "train":
-                dataset = datasets.SplitGenerator(
+
+        return [
+            datasets.SplitGenerator(
                 name=datasets.Split.TRAIN,
                 # These kwargs will be passed to _generate_examples
                 gen_kwargs={
                     "filepath": os.path.join(downloaded_file, "small_dataset/train/"),
                     "split": "train",
                 },
-                )
-            elif split in ["val", "valid", "validation", "dev"]:
-                dataset = datasets.SplitGenerator(
+            ),
+            datasets.SplitGenerator(
                 name=datasets.Split.VALIDATION,
                 # These kwargs will be passed to _generate_examples
                 gen_kwargs={
                     "filepath": os.path.join(downloaded_file, "small_dataset/val/"),
-                    "split": "val",
+                    "split": "dev",
                 },
-                )
-            elif split in ["test"]:
-                dataset = datasets.SplitGenerator(
+            ),
+            datasets.SplitGenerator(
                 name=datasets.Split.TEST,
                 # These kwargs will be passed to _generate_examples
                 gen_kwargs={
                     "filepath": os.path.join(downloaded_file, "small_dataset/test/"),
-                    "split": "test",
+                    "split": "test"
                 },
-                )
-            else:
-                continue
+            ),
+        ]
 
-            splits.append(dataset)
-        return splits
 
     def _generate_examples(self, filepath, split):
         logger.info("⏳ Generating examples from = %s", filepath)
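
With the custom splits argument removed from the builder config, _split_generators always returns the train, validation and test generators, so the dataset can be loaded with a plain load_dataset call. A minimal usage sketch, assuming the updated script is the one served from the pierreguillou/DocLayNet-small repository (the printed split names follow from the SplitGenerators above, not from output shown in this commit):

from datasets import load_dataset

# Load the small DocLayNet subset; no custom `splits` argument is needed
# after this change, since the three SplitGenerators above are always built.
dataset = load_dataset("pierreguillou/DocLayNet-small")
print(dataset)  # expected: a DatasetDict with "train", "validation" and "test" splits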