flaviagiammarino committed
Commit
3871cc8
1 Parent(s): 960c87d

Delete scripts

Files changed (1)
  1. scripts/process_dataset.py +0 -35
scripts/process_dataset.py DELETED
@@ -1,35 +0,0 @@
-"""This script de-duplicates the data provided by the PathVQA authors,
-creates an "imagefolder" dataset and pushes it to the hub.
-"""
-
-import os
-import shutil
-import pickle
-import datasets
-import pandas as pd
-
-for split in ["train", "val", "test"]:
-
-    os.makedirs(f"data/{split}/", exist_ok=True)
-
-    # load the image-question-answer triplets
-    data = pd.DataFrame(pickle.load(open(f"pvqa/qas/{split}/{split}_qa.pkl", "rb")))
-
-    # drop the duplicate image-question-answer triplets
-    data = data.drop_duplicates(ignore_index=True)
-
-    # convert the image names to file names
-    data = data.rename(columns={"image": "file_name"})
-    data["file_name"] += ".jpg"
-
-    # copy the images referenced by the question-answer pairs
-    for image in data["file_name"].unique():
-        shutil.copyfile(src=f"pvqa/images/{split}/{image}", dst=f"data/{split}/{image}")
-
-    # save the metadata
-    data.to_csv(f"data/{split}/metadata.csv", index=False)
-
-# push the dataset to the hub
-dataset = datasets.load_dataset("imagefolder", data_dir="data/")
-dataset.push_to_hub("flaviagiammarino/path-vqa")
-
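For reference, the dataset pushed by the deleted script remains available on the hub and can be loaded back with the same datasets library; a minimal sketch, with the repo id taken from the script above:

import datasets

# load the published PathVQA dataset directly from the hub
dataset = datasets.load_dataset("flaviagiammarino/path-vqa")
print(dataset)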