import pickle

import datasets
import numpy as np
import pyarrow.parquet as pq
_DESCRIPTION = """\
In Situ Thermography During Laser Powder Bed Fusion of a Nickel Superalloy 625
Artifact with Various Overhangs and Supports
"""
# Layers 1-99 inclusive, skipping layer 22.
LAYER_BASE_URLS = [f"./layer/base/{n}.pkl" for n in range(1, 100) if n != 22]
LAYER_OVERHANG_WITH_SUPPORTS_URLS = [f"./layer/overhang_with_supports/{n}.pkl" for n in range(101, 281)]
LAYER_BLOCK_URLS = [f"./layer/block/{n}.pkl" for n in range(281, 381)]
LAYER_OVERHANG_NO_SUPPORTS_URLS = [f"./layer/overhang_no_supports/{n}.pkl" for n in range(381, 560)]
# Layers 1-99 inclusive, skipping layer 22.
LAYER_TABLE_BASE_URLS = [f"./layer_table/base/{n}.parquet" for n in range(1, 100) if n != 22]
LAYER_TABLE_OVERHANG_WITH_SUPPORTS_URLS = [f"./layer_table/overhang_with_supports/{n}.parquet" for n in range(101, 281)]
LAYER_TABLE_BLOCK_URLS = [f"./layer_table/block/{n}.parquet" for n in range(281, 381)]
LAYER_TABLE_OVERHANG_NO_SUPPORTS_URLS = [f"./layer_table/overhang_no_supports/{n}.parquet" for n in range(381, 560)]
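
# Relative URLs are resolved by the `datasets` download manager against the
# dataset repository, so each entry points at a file hosted alongside this
# script.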
_URLS = {
"part_section": {
"base": "./part_section/BASE.pkl",
"block": "./part_section/BLOCK.pkl",
"overhang_no_supports": "./part_section/OVERHANG_noSup.pkl",
"overhang_with_supports": "./part_section/OVERHANG_wSup.pkl",
},
"layer": {
"base": LAYER_BASE_URLS,
"block": LAYER_BLOCK_URLS,
"overhang_no_supports": LAYER_OVERHANG_NO_SUPPORTS_URLS,
"overhang_with_supports": LAYER_OVERHANG_WITH_SUPPORTS_URLS,
},
"layer_table": {
"base": LAYER_TABLE_BASE_URLS,
"block": LAYER_TABLE_BLOCK_URLS,
"overhang_no_supports": LAYER_TABLE_OVERHANG_NO_SUPPORTS_URLS,
"overhang_with_supports": LAYER_TABLE_OVERHANG_WITH_SUPPORTS_URLS,
}
}
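
# Each config below exposes the same four splits, one per part section of the
# build: base, block, overhang_no_supports, and overhang_with_supports.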
class NISTInSituIN625LPBFOverhangsDataset(datasets.GeneratorBasedBuilder):
    """In situ thermography of an IN625 LPBF build with various overhangs and supports."""

    VERSION = datasets.Version("0.0.1")
    BUILDER_CONFIGS = [
        datasets.BuilderConfig(
            name="part_section",
            description="Original dataset files, one pickle per part section",
            version=VERSION,
        ),
        datasets.BuilderConfig(
            name="layer",
            description="Layer-wise attributes of the entire dataset, one pickle per layer",
            version=VERSION,
        ),
        datasets.BuilderConfig(
            name="layer_table",
            description="Layer-wise attributes of the entire dataset, one Parquet table per layer",
            version=VERSION,
        ),
    ]
DEFAULT_CONFIG_NAME = "layer_table"
def _info(self):
        features = datasets.Features({
            "folder_layer_range": datasets.Value("string"),
            "part": datasets.Value("string"),
            "part_section": datasets.Value("string"),
            "process": datasets.Value("string"),
            "source": datasets.Value("string"),
            "layer_number": datasets.Value("string"),
            # Nested 2-D sequence of floats.
            "build_time": datasets.Sequence(datasets.Sequence(datasets.Value("float32"))),
            "contact_email": datasets.Value("string"),
            "file_name": datasets.Value("string"),
            "hatch_spacing": datasets.Value("uint32"),
            "laser_power": datasets.Value("uint32"),
            "layer_thickness": datasets.Value("uint32"),
            "material": datasets.Value("string"),
            # Nested 3-D sequence, presumably a stack of 2-D thermographic frames.
            "radiant_temp": datasets.Sequence(datasets.Sequence(datasets.Sequence(datasets.Value("uint32")))),
            "s_hvariable__a": datasets.Value("float32"),
            "s_hvariable__b": datasets.Value("float32"),
            "s_hvariable__c": datasets.Value("float32"),
            "scan_speed": datasets.Value("uint32"),
            "website": datasets.Value("string"),
        })
return datasets.DatasetInfo(
description=_DESCRIPTION,
features=features,
)
    def _split_generators(self, dl_manager):
        urls = _URLS[self.config.name]
        # `download_and_extract` preserves the structure of `urls`, returning a
        # dict that maps each split name to a local path (or list of paths).
        downloaded_files = dl_manager.download_and_extract(urls)
        return [
            datasets.SplitGenerator(
                name=split,
                gen_kwargs={
                    "filepath": downloaded_files[split],
                    "split": split,
                },
            )
            for split in (
                "base",
                "block",
                "overhang_no_supports",
                "overhang_with_supports",
            )
        ]
    def _generate_examples(self, filepath, split):
        if self.config.name == "part_section":
            # A single pickle file holds every layer of the part section.
            with open(filepath, "rb") as f:
                layers = pickle.load(f)
            for index, layer in enumerate(layers):
                yield index, layer
        elif self.config.name == "layer":
            # The `layer` config receives a list of per-layer pickle files.
            for index, path in enumerate(filepath):
                with open(path, "rb") as f:
                    layer = pickle.load(f)
                yield index, layer
elif self.config.name == "layer_table":
# layer config has multiple files in filepath variable.
for index, path in enumerate(filepath):
with open(path, "rb") as f:
table = pq.read_table(f)
layer = table.to_pydict()
non_array = [str, int, float]
converted_layer = {}
for key, value in layer.items():
layer_value = value[0]
if (type(layer_value) in non_array):
converted_layer[key] = layer_value
elif(isinstance(value, list) and "shape" not in key):
shape = layer[f"{key}_shape"][0]
flattened_array = np.array(layer_value)
array = flattened_array.reshape(shape)
converted_layer[key] = array
yield index, converted_layer
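
# A minimal usage sketch. The repository id below is an assumption based on
# this script's filename and owner, not something the script itself confirms;
# it also assumes a `datasets` version that still supports script-based
# datasets (recent releases require `trust_remote_code=True` or drop script
# loading entirely):
#
#     from datasets import load_dataset
#
#     dataset = load_dataset(
#         "ppak10/NIST-In-Situ-IN625-LPBF-Overhangs",  # assumed hub id
#         name="layer_table",
#         split="base",
#         trust_remote_code=True,
#     )
#     print(dataset[0]["laser_power"], dataset[0]["material"])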