|
import os |
|
import datasets |
|
|
|
class HurricaneDetection(datasets.GeneratorBasedBuilder):
    """Dataset builder that indexes local hurricane ``.h5`` files.

    Each example carries a single string feature, ``file_path``, pointing
    at one ``.h5`` file found (non-recursively) in the ``hurricane``
    directory under the current working directory.  Nothing is downloaded;
    downstream code is expected to open the files itself.
    """

    VERSION = datasets.Version("1.0.0")

    def _info(self):
        """Return the dataset metadata and feature schema."""
        return datasets.DatasetInfo(
            # Fix: the generator yields .h5 paths, not .nc — the previous
            # description (".nc files") contradicted the actual contents.
            description="Dataset containing .h5 files for training.",
            features=datasets.Features({
                "file_path": datasets.Value("string"),
            }),
            supervised_keys=None,
            homepage="https://huggingface.co/datasets/nasa-impact/WINDSET/tree/main/hurricane",
            license="MIT",
        )

    def _split_generators(self, dl_manager):
        """Define the dataset splits (train only).

        Args:
            dl_manager: Unused — data is expected to already exist locally
                under ``<cwd>/hurricane``; nothing is downloaded.

        Returns:
            A single-element list with the TRAIN split generator.
        """
        data_dir = os.path.join(os.getcwd(), "hurricane")
        # The previous `train_dir = os.path.join(data_dir)` was a no-op
        # re-wrap of the same path; use data_dir directly.
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={"split_dir": data_dir},
            ),
        ]

    def _generate_data_from_files(self, data_dir):
        """Yield ``(example_id, example)`` pairs for each ``.h5`` file.

        Args:
            data_dir: Directory scanned (non-recursively) for ``.h5`` files.

        Yields:
            Tuples of a sequential integer id and a dict with the single
            key ``file_path`` holding the file's full path.
        """
        # Sort the listing so example ids are deterministic across runs
        # and filesystems — os.listdir order is arbitrary, which would
        # break reproducibility (and HF caching/fingerprinting).
        example_id = 0
        for file_name in sorted(os.listdir(data_dir)):
            if file_name.endswith(".h5"):
                yield example_id, {
                    "file_path": os.path.join(data_dir, file_name),
                }
                example_id += 1

    def _generate_examples(self, split_dir):
        """Generate examples for the dataset from the split directory."""
        yield from self._generate_data_from_files(split_dir)
|
|