import json
import os

import datasets
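

# Expected data layout (inferred from the loading logic below):
#   metadata.json: maps each simulation id to its "coeff" and "input" JSON file paths
#   <data_dir>/<sim_id>/: input_mesh.obj, openfoam_mesh.obj,
#                         pressure_field_mesh.vtk, streamlines_mesh.ply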
class WindtunnelDataset(datasets.GeneratorBasedBuilder):
    """Dataset for loading simulation data with JSON and mesh files."""

    def _info(self):
return datasets.DatasetInfo(
features=datasets.Features(
{
"coeff": datasets.Value("binary"), # JSON file as a dictionary
"input": datasets.Value("binary"), # JSON file as a dictionary
"input_mesh": datasets.Value("binary"), # OBJ mesh file as binary
"openfoam_mesh": datasets.Value(
"binary"
), # OBJ mesh file as binary
"pressure_field_mesh": datasets.Value(
"binary"
), # VTK file as binary
"streamlines_mesh": datasets.Value("binary"), # PLY file as binary
}
)
)

    def _split_generators(self, dl_manager):
"""Define the splits for the dataset."""
with open("metadata.json", "r") as f:
metadata = json.load(f)
# Split the dataset into train, validation, and test
train_ids, val_ids, test_ids = self._train_val_test_split(metadata)
return [
datasets.SplitGenerator(
name=datasets.Split.TRAIN,
gen_kwargs={"metadata": {id: metadata[id] for id in train_ids}},
),
datasets.SplitGenerator(
name=datasets.Split.VALIDATION,
gen_kwargs={"metadata": {id: metadata[id] for id in val_ids}},
),
datasets.SplitGenerator(
name=datasets.Split.TEST,
gen_kwargs={"metadata": {id: metadata[id] for id in test_ids}},
),
]

    def _generate_examples(self, metadata):
"""Generate examples for each split."""
for sim_id, files in metadata.items():
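            # "coeff" and "input" paths come directly from metadata.json; the mesh
            # files are resolved under the data_dir passed to the builder config.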
yield (
sim_id,
{
"coeff": self._load_json(files["coeff"]),
"input": self._load_json(files["input"]),
"input_mesh": self._load_binary(
os.path.join(self.config.data_dir, sim_id, "input_mesh.obj")
),
"openfoam_mesh": self._load_binary(
os.path.join(self.config.data_dir, sim_id, "openfoam_mesh.obj")
),
"pressure_field_mesh": self._load_binary(
os.path.join(
self.config.data_dir, sim_id, "pressure_field_mesh.vtk"
)
),
"streamlines_mesh": self._load_binary(
os.path.join(
self.config.data_dir, sim_id, "streamlines_mesh.ply"
)
),
},
)

    def _load_json(self, file_path):
        """Load a JSON file and return it re-serialized as UTF-8 bytes, since
        the "coeff" and "input" features are declared as binary above."""
        with open(file_path, "r") as f:
            return json.dumps(json.load(f)).encode("utf-8")

    def _load_binary(self, file_path):
"""Load a binary file and return its contents."""
with open(file_path, "rb") as f:
return f.read()

    def _train_val_test_split(self, metadata):
        """Split the simulation ids 70/15/15 into train, validation, and test sets."""
keys = list(metadata.keys())
train_ids = keys[: int(0.7 * len(keys))]
val_ids = keys[int(0.7 * len(keys)) : int(0.85 * len(keys))]
test_ids = keys[int(0.85 * len(keys)) :]
return train_ids, val_ids, test_ids
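

# Minimal usage sketch (not part of the loading script itself): the paths below
# are hypothetical, data_dir must point at the per-simulation directories, and
# metadata.json must be readable from the working directory.
# `trust_remote_code=True` may be required on recent versions of `datasets`
# when loading from a script.
if __name__ == "__main__":
    ds = datasets.load_dataset(
        "windtunnel.py",  # path to this loading script
        data_dir="path/to/simulations",  # hypothetical data location
        trust_remote_code=True,
    )
    print({split: subset.num_rows for split, subset in ds.items()})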