Modalities: 3D · Size: 10K<n<100K

commit files to HF hub

Files changed:
- metadata.json (+2, -0)
- windtunnel_dataset.py → windtunnel.py (+22, -12)
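
This commit adds the per-simulation input.json to the file map in metadata.json, registers a matching "input" feature in the loader, and renames the loader script while reformatting it (double quotes, spaced slices).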
metadata.json (CHANGED)

@@ -1,6 +1,7 @@
 {
     "1": {
         "streamlines_mesh.ply": "data/1/streamlines_mesh.ply",
+        "input.json": "data/1/input.json",
         "coeff.json": "data/1/coeff.json",
         "openfoam_mesh.obj": "data/1/openfoam_mesh.obj",
         "pressure_field_mesh.vtk": "data/1/pressure_field_mesh.vtk",
@@ -8,6 +9,7 @@
     },
     "2": {
         "streamlines_mesh.ply": "data/2/streamlines_mesh.ply",
+        "input.json": "data/2/input.json",
         "coeff.json": "data/2/coeff.json",
         "openfoam_mesh.obj": "data/2/openfoam_mesh.obj",
         "pressure_field_mesh.vtk": "data/2/pressure_field_mesh.vtk",
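
metadata.json maps each simulation ID to its files under data/<id>/, and the loader below reads this map to build the splits. For reference, a minimal sketch of a script that could regenerate the map from that directory layout; the script name and the fixed file list are assumptions, not part of the commit:

# generate_metadata.py -- hypothetical helper, not part of this commit.
# Rebuilds metadata.json from the data/<sim_id>/ layout shown above.
import json
import os

DATA_DIR = "data"
EXPECTED_FILES = [  # file names taken from the mapping above
    "streamlines_mesh.ply",
    "input.json",
    "coeff.json",
    "openfoam_mesh.obj",
    "pressure_field_mesh.vtk",
]

metadata = {}
for sim_id in sorted(os.listdir(DATA_DIR)):
    sim_dir = os.path.join(DATA_DIR, sim_id)
    if not os.path.isdir(sim_dir):
        continue
    # Record only the files actually present for this simulation.
    metadata[sim_id] = {
        name: f"{DATA_DIR}/{sim_id}/{name}"
        for name in EXPECTED_FILES
        if os.path.exists(os.path.join(sim_dir, name))
    }

with open("metadata.json", "w") as f:
    json.dump(metadata, f, indent=4)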
windtunnel_dataset.py → windtunnel.py (RENAMED)

@@ -2,6 +2,7 @@ import json
 import os
 import datasets
 
+
 class WindtunnelDataset(datasets.GeneratorBasedBuilder):
     """Dataset for loading simulation data with JSON and mesh files"""
 
@@ -10,7 +11,7 @@ class WindtunnelDataset(datasets.GeneratorBasedBuilder):
             features=datasets.Features(
                 {
                     "coeff": datasets.Value("dict"),  # JSON file as a dictionary
-
+                    "input": datasets.Value("dict"),  # JSON file as a dictionary
                     "input_mesh": datasets.Value("binary"),  # OBJ mesh file as binary
                     "openfoam_mesh": datasets.Value(
                         "binary"
@@ -25,17 +26,26 @@ class WindtunnelDataset(datasets.GeneratorBasedBuilder):
 
     def _split_generators(self, dl_manager):
         """Define the splits for the dataset."""
-        metadata_path = os.path.join(self.config.data_dir,
-        with open(metadata_path,
+        metadata_path = os.path.join(self.config.data_dir, "metadata.json")
+        with open(metadata_path, "r") as f:
             metadata = json.load(f)
 
         # Split the dataset into train, validation, and test
         train_ids, val_ids, test_ids = self._train_val_test_split(metadata)
 
         return [
-            datasets.SplitGenerator(
-
-
+            datasets.SplitGenerator(
+                name=datasets.Split.TRAIN,
+                gen_kwargs={"metadata": {id: metadata[id] for id in train_ids}},
+            ),
+            datasets.SplitGenerator(
+                name=datasets.Split.VALIDATION,
+                gen_kwargs={"metadata": {id: metadata[id] for id in val_ids}},
+            ),
+            datasets.SplitGenerator(
+                name=datasets.Split.TEST,
+                gen_kwargs={"metadata": {id: metadata[id] for id in test_ids}},
+            ),
         ]
 
     def _generate_examples(self, metadata):
@@ -45,7 +55,7 @@ class WindtunnelDataset(datasets.GeneratorBasedBuilder):
                 sim_id,
                 {
                     "coeff": self._load_json(files["coeff"]),
-
+                    "input": self._load_json(files["input"]),
                     "input_mesh": self._load_binary(
                         os.path.join(self.config.data_dir, sim_id, "input_mesh.obj")
                     ),
@@ -67,18 +77,18 @@ class WindtunnelDataset(datasets.GeneratorBasedBuilder):
 
     def _load_json(self, file_path):
         """Load a JSON file and return it as a dictionary."""
-        with open(file_path,
+        with open(file_path, "r") as f:
             return json.load(f)
 
     def _load_binary(self, file_path):
         """Load a binary file and return its contents."""
-        with open(file_path,
+        with open(file_path, "rb") as f:
             return f.read()
 
     def _train_val_test_split(self, metadata):
         """Implement logic for splitting metadata into train, validation, and test sets."""
         keys = list(metadata.keys())
-        train_ids = keys[:int(0.7 * len(keys))]
-        val_ids = keys[int(0.7 * len(keys)):int(0.85 * len(keys))]
-        test_ids = keys[int(0.85 * len(keys)):]
+        train_ids = keys[: int(0.7 * len(keys))]
+        val_ids = keys[int(0.7 * len(keys)) : int(0.85 * len(keys))]
+        test_ids = keys[int(0.85 * len(keys)) :]
         return train_ids, val_ids, test_ids
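
One caveat with the features above: datasets.Value only accepts primitive Arrow dtypes such as "string", "binary", or "int64", so datasets.Value("dict") will raise when the builder is loaded. A minimal sketch of a workaround, serializing the JSON fields to strings, together with a local load call; the remaining feature names and all paths here are assumptions following the file names in metadata.json, not part of the commit:

# Sketch, not part of this commit: datasets.Value has no "dict" dtype,
# so JSON payloads are stored as strings and parsed after loading.
import json
import datasets

# Features the builder's _info() could declare instead of Value("dict").
features = datasets.Features(
    {
        "coeff": datasets.Value("string"),       # json.dumps(...) in _generate_examples
        "input": datasets.Value("string"),       # json.dumps(...) in _generate_examples
        "input_mesh": datasets.Value("binary"),  # raw OBJ bytes
        "openfoam_mesh": datasets.Value("binary"),
        "pressure_field_mesh": datasets.Value("binary"),  # assumed name, from the .vtk file
        "streamlines_mesh": datasets.Value("binary"),     # assumed name, from the .ply file
    }
)

# Loading the script-based builder from a local checkout (paths assumed).
ds = datasets.load_dataset("./windtunnel.py", data_dir=".", trust_remote_code=True)
coeff = json.loads(ds["train"][0]["coeff"])  # parse the JSON string back into a dict

Storing JSON as strings keeps the schema Arrow-compatible at the cost of a json.loads per access; a nested datasets.Features mapping with explicit keys would also work if the coeff and input schemas are fixed.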