AlexBlck committed
Commit 9a24dfc
1 Parent(s): 4ac75fb

define splits

Files changed (1)
  1. ANAKIN.py +40 -35
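
This commit defines train/validation/test splits over the shuffled video ids (roughly 70% / 10% / 20%), so each split can be requested by name through the standard datasets API. A minimal usage sketch, assuming the config name "full"; the valid config names are whatever keys _FOLDERS defines in ANAKIN.py:

# Hedged sketch: load one of the newly defined splits of AlexBlck/ANAKIN.
# "full" is an assumed config name; valid names are the keys of _FOLDERS in ANAKIN.py.
from datasets import load_dataset

val_set = load_dataset("AlexBlck/ANAKIN", "full", split="validation")
print(val_set)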
ANAKIN.py CHANGED
@@ -163,57 +163,62 @@ class Anakin(datasets.GeneratorBasedBuilder):
         # It can accept any type or nested list/dict and will give back the same structure with the url replaced with path to local files.
         # By default the archives will be extracted and a path to a cached folder where they are extracted is returned instead of the archive
         metadata_dir = dl_manager.download_and_extract(_METADATA_URL)
+        folders = _FOLDERS[self.config.name]

         random.seed(47)
         root_url = "https://huggingface.co/datasets/AlexBlck/ANAKIN/resolve/main/"
         df = pd.read_csv(metadata_dir)
+        if "full" in folders:
+            df = df[df["full-available"] == True]
+        if "masks" in folders:
+            df = df[df["has-masks"] == True]
+
         ids = df["video-id"].to_list()
         random.shuffle(ids)

-        folders = _FOLDERS[self.config.name]
-        data_urls = [
-            {f"{folder}": root_url + f"{folder}/{idx}.mp4" for folder in folders}
-            for idx in ids
-        ]
-        data_dir = dl_manager.download(data_urls)
-        mask_dir = {
-            idx: dl_manager.iter_archive(
-                dl_manager.download(root_url + f"masks/{idx}.zip")
-            )
-            for idx in ids
-        }
+        train_end = int(len(df) * 0.7)
+        val_end = int(len(df) * 0.8)
+        data_dir = {}
+        mask_dir = {}
+
+        for split in [
+            datasets.Split.TRAIN,
+            datasets.Split.VALIDATION,
+            datasets.Split.TEST,
+        ]:
+            if split == datasets.Split.TRAIN:
+                split_ids = ids[:train_end]
+            elif split == datasets.Split.VALIDATION:
+                split_ids = ids[train_end:val_end]
+            else:
+                split_ids = ids[val_end:]
+            data_urls = [
+                {f"{folder}": root_url + f"{folder}/{idx}.mp4" for folder in folders}
+                for idx in split_ids
+            ]
+            data_dir[split] = dl_manager.download(data_urls)
+            mask_dir[split] = {
+                idx: dl_manager.iter_archive(
+                    dl_manager.download(root_url + f"masks/{idx}.zip")
+                )
+                for idx in split_ids
+            }

         return [
             datasets.SplitGenerator(
-                name=datasets.Split.TRAIN,
-                # These kwargs will be passed to _generate_examples
+                name=split,
                 gen_kwargs={
                     "files": data_dir,
                     "masks": mask_dir,
                     "df": df,
                     "return_time": "full" in folders,
                 },
-            ),
-            datasets.SplitGenerator(
-                name=datasets.Split.VALIDATION,
-                # These kwargs will be passed to _generate_examples
-                gen_kwargs={
-                    "files": data_dir,
-                    "masks": mask_dir,
-                    "df": df,
-                    "return_time": "full" in folders,
-                },
-            ),
-            datasets.SplitGenerator(
-                name=datasets.Split.TEST,
-                # These kwargs will be passed to _generate_examples
-                gen_kwargs={
-                    "files": data_dir,
-                    "masks": mask_dir,
-                    "df": df,
-                    "return_time": "full" in folders,
-                },
-            ),
+            )
+            for split in [
+                datasets.Split.TRAIN,
+                datasets.Split.VALIDATION,
+                datasets.Split.TEST,
+            ]
         ]

     def _generate_examples(self, files, masks, df, return_time):
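
For reference, the split boundaries introduced above follow a 70/10/20 partition of the shuffled ids. A small standalone sketch of that arithmetic with a toy id list (the ids and counts below are illustrative, not the real metadata):

import random

# Toy stand-in for df["video-id"].to_list(); the real ids come from the metadata CSV.
ids = [f"vid{i:03d}" for i in range(100)]
random.seed(47)
random.shuffle(ids)

train_end = int(len(ids) * 0.7)  # first 70% of ids -> train
val_end = int(len(ids) * 0.8)    # next 10% -> validation, remaining 20% -> test

splits = {
    "train": ids[:train_end],
    "validation": ids[train_end:val_end],
    "test": ids[val_end:],
}
print({name: len(split_ids) for name, split_ids in splits.items()})
# {'train': 70, 'validation': 10, 'test': 20}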