Datasets:
Tasks:
Image Segmentation
Modalities:
Image
Languages:
English
Tags:
Cloud Detection
Cloud Segmentation
Remote Sensing Images
Satellite Images
HRC-WHU
CloudSEN12-High
License:
XavierJiezou committed on
Commit 793caea
1 Parent(s): 1618c27
Create upload_zip_to_hub.py
upload_zip_to_hub.py +69 -0
upload_zip_to_hub.py
ADDED
@@ -0,0 +1,69 @@
import os
import zipfile
from huggingface_hub import HfApi, HfFolder

# Define the root directory containing all datasets
base_root = "data"  # Replace with the directory containing all datasets
dataset_repo = "XavierJiezou/Cloud-Adapter"  # Hugging Face repository name
dataset_names = [
    "hrc_whu",
    "gf12ms_whu_gf1",
    "gf12ms_whu_gf2",
    "cloudsen12_high_l1c",
    "cloudsen12_high_l2a",
    "l8_biome",
]

# Function to create a ZIP file for a dataset directory
def create_zip(dataset_path, output_path):
    """
    Compress a dataset directory into a ZIP file.

    Args:
        dataset_path (str): Path to the dataset directory.
        output_path (str): Path to save the ZIP file.
    """
    with zipfile.ZipFile(output_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
        for root, _, files in os.walk(dataset_path):
            for file in files:
                file_path = os.path.join(root, file)
                arcname = os.path.relpath(file_path, dataset_path)
                zipf.write(file_path, arcname)
    print(f"Compressed {dataset_path} into {output_path}")

# Function to upload ZIP files to Hugging Face Hub
def upload_zip_to_hub(dataset_name, zip_path, repo_name):
    """
    Upload a ZIP file to a Hugging Face repository.

    Args:
        dataset_name (str): Name of the dataset (used as a file identifier).
        zip_path (str): Path to the ZIP file.
        repo_name (str): Hugging Face repository name.
    """
    api = HfApi()
    token = HfFolder.get_token()
    file_name = f"{dataset_name}.zip"
    api.upload_file(
        path_or_fileobj=zip_path,
        path_in_repo=file_name,
        repo_id=repo_name,
        repo_type="dataset",
        token=token,
    )
    print(f"Uploaded {file_name} to {repo_name}")

# Main script
if __name__ == "__main__":
    for dataset_name in dataset_names:
        dataset_path = os.path.join(base_root, dataset_name)
        if not os.path.exists(dataset_path):
            print(f"Dataset directory does not exist: {dataset_path}")
            continue

        # Create ZIP file
        zip_path = f"{dataset_name}.zip"
        create_zip(dataset_path, zip_path)

        # Upload ZIP file to Hugging Face Hub
        upload_zip_to_hub(dataset_name, zip_path, dataset_repo)
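For reference, the archives this script pushes to the Hub can be pulled back down with hf_hub_download and unpacked locally. The snippet below is a minimal sketch, not part of the commit; it assumes the repository name and per-dataset ZIP file names produced by the script above (e.g. hrc_whu.zip in XavierJiezou/Cloud-Adapter), and the extraction directory is a hypothetical choice.

# Minimal sketch (assumption, not part of this commit): download one of the
# uploaded archives from the Hub and extract it locally.
import zipfile

from huggingface_hub import hf_hub_download

# Assumes the repo_id and file name produced by upload_zip_to_hub.py above.
zip_path = hf_hub_download(
    repo_id="XavierJiezou/Cloud-Adapter",
    filename="hrc_whu.zip",
    repo_type="dataset",
)

# Hypothetical target directory; mirrors the base_root layout used when zipping.
with zipfile.ZipFile(zip_path) as zipf:
    zipf.extractall("data/hrc_whu")
print(f"Extracted {zip_path} to data/hrc_whu")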