rvalerio committed on
Commit 0326d8c · 1 Parent(s): 7b22ead

commit files to HF hub

Files changed (1)
  1. windtunnel_dataset.py +63 -0
windtunnel_dataset.py ADDED
@@ -0,0 +1,63 @@
import json
import os

import datasets


class WindtunnelDataset(datasets.GeneratorBasedBuilder):
    """Dataset for loading wind-tunnel simulation data with JSON and mesh files."""

    def _info(self):
        return datasets.DatasetInfo(
            features=datasets.Features({
                # `datasets.Value('dict')` is not a supported feature type, so the
                # JSON contents are stored as serialized JSON strings instead.
                'coeff': datasets.Value('string'),                # JSON file, serialized to a string
                'input': datasets.Value('string'),                # JSON file, serialized to a string
                'input_mesh': datasets.Value('binary'),           # OBJ mesh file as binary
                'openfoam_mesh': datasets.Value('binary'),        # OBJ mesh file as binary
                'pressure_field_mesh': datasets.Value('binary'),  # VTK file as binary
                'streamlines_mesh': datasets.Value('binary'),     # PLY file as binary
            })
        )

    def _split_generators(self, dl_manager):
        """Define the train/validation/test splits for the dataset."""
        metadata_path = os.path.join(self.config.data_dir, 'metadata.json')
        with open(metadata_path, 'r') as f:
            metadata = json.load(f)

        # Split the simulation ids into train, validation, and test subsets.
        train_ids, val_ids, test_ids = self._train_val_test_split(metadata)

        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={'metadata': {sim_id: metadata[sim_id] for sim_id in train_ids}},
            ),
            datasets.SplitGenerator(
                name=datasets.Split.VALIDATION,
                gen_kwargs={'metadata': {sim_id: metadata[sim_id] for sim_id in val_ids}},
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST,
                gen_kwargs={'metadata': {sim_id: metadata[sim_id] for sim_id in test_ids}},
            ),
        ]

    def _generate_examples(self, metadata):
        """Generate one example per simulation id."""
        for sim_id, files in metadata.items():
            yield sim_id, {
                'coeff': json.dumps(self._load_json(files['coeff'])),
                'input': json.dumps(self._load_json(files['input'])),
                'input_mesh': self._load_binary(os.path.join(self.config.data_dir, sim_id, 'input_mesh.obj')),
                'openfoam_mesh': self._load_binary(os.path.join(self.config.data_dir, sim_id, 'openfoam_mesh.obj')),
                'pressure_field_mesh': self._load_binary(os.path.join(self.config.data_dir, sim_id, 'pressure_field_mesh.vtk')),
                'streamlines_mesh': self._load_binary(os.path.join(self.config.data_dir, sim_id, 'streamlines_mesh.ply')),
            }

    def _load_json(self, file_path):
        """Load a JSON file and return it as a dictionary."""
        with open(file_path, 'r') as f:
            return json.load(f)

    def _load_binary(self, file_path):
        """Load a binary file and return its raw bytes."""
        with open(file_path, 'rb') as f:
            return f.read()

    def _train_val_test_split(self, metadata):
        """Split the metadata keys 70/15/15 into train, validation, and test ids."""
        keys = list(metadata.keys())
        train_ids = keys[:int(0.7 * len(keys))]
        val_ids = keys[int(0.7 * len(keys)):int(0.85 * len(keys))]
        test_ids = keys[int(0.85 * len(keys)):]
        return train_ids, val_ids, test_ids
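
A minimal usage sketch, assuming a local data directory laid out the way the script expects: a metadata.json that maps each simulation id to the paths of its 'coeff' and 'input' JSON files, plus input_mesh.obj, openfoam_mesh.obj, pressure_field_mesh.vtk, and streamlines_mesh.ply under <data_dir>/<sim_id>/. The directory path './windtunnel_data' below is hypothetical, and recent releases of the datasets library may also require trust_remote_code=True when running a loading script.

    import json

    from datasets import load_dataset

    # Point data_dir at the hypothetical folder containing metadata.json
    # and the per-simulation subdirectories.
    dataset = load_dataset(
        'windtunnel_dataset.py',
        data_dir='./windtunnel_data',
    )

    example = dataset['train'][0]
    coeff = json.loads(example['coeff'])  # JSON features are stored as serialized strings
    mesh_bytes = example['input_mesh']    # raw bytes of the OBJ mesh

    print(coeff)
    print(f'{len(mesh_bytes)} bytes of OBJ data')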