flaviagiammarino committed on
Commit d90b4c1
1 Parent(s): 5d83468

Create scripts/processing.py

Files changed (1)
  1. scripts/processing.py +35 -0
scripts/processing.py ADDED
@@ -0,0 +1,35 @@
+ """This script de-duplicates the data provided by the PathVQA authors,
+ creates an "imagefolder" dataset and pushes it to the Hugging Face Hub.
+ """
+
+ import os
+ import shutil
+ import pickle
+ import datasets
+ import pandas as pd
+
+ for split in ["train", "val", "test"]:
+
+     os.makedirs(f"data/{split}/", exist_ok=True)
+
+     # load the image-question-answer triplets
+     data = pd.DataFrame(pickle.load(open(f"pvqa/qas/{split}/{split}_qa.pkl", "rb")))
+
+     # drop the duplicate image-question-answer triplets
+     data = data.drop_duplicates(ignore_index=True)
+
+     # copy the images using unique file names
+     data.insert(0, "file_name", "")
+     for i, row in data.iterrows():
+         file_name = f"img_{i}.jpg"
+         data["file_name"].iloc[i] = file_name
+         shutil.copyfile(src=f"pvqa/images/{split}/{row['image']}.jpg", dst=f"data/{split}/{file_name}")
+     _ = data.pop("image")
+
+     # save the metadata
+     data.to_csv(f"data/{split}/metadata.csv", index=False)
+
+ # push the dataset to the hub
+ dataset = datasets.load_dataset("imagefolder", data_dir="data/")
+ dataset.push_to_hub("flaviagiammarino/path-vqa")
+
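
After the push, the dataset can be consumed straight from the Hub with the same datasets library. A minimal sketch, assuming the split built from the val/ folder is exposed as "validation" (the imagefolder loader's usual keyword mapping) and that the metadata carries "question" and "answer" columns alongside the copied images:

import datasets

# load the dataset that the script above pushed to the Hub
dataset = datasets.load_dataset("flaviagiammarino/path-vqa")

# inspect one example; "question" and "answer" are assumed metadata columns,
# "image" is decoded from the copied JPEG files
example = dataset["train"][0]
print(example["question"], "->", example["answer"])

As a side note, data["file_name"].iloc[i] = file_name in the script relies on chained assignment, which pandas no longer honors once copy-on-write is enabled; data.loc[i, "file_name"] = file_name is the direct equivalent.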