'''
Custom dataset builder for the msynth dataset.
'''
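# Illustrative usage sketch (not part of the builder itself). It assumes this
# script is hosted in the didsr/msynth Hub repository and that the installed
# `datasets` version still supports script-based builders loaded with
# `trust_remote_code`:
#
#   from datasets import load_dataset
#   meta = load_dataset("didsr/msynth", name="metadata", trust_remote_code=True)
#
# The "device_data" and "segmentation_mask" configs download considerably more
# data; see the config class below for the parameters they expand over.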

import os
import re

import datasets

logger = datasets.logging.get_logger(__name__)

_CITATION = """\
@article{sizikova2023knowledge,
  title={Knowledge-based in silico models and dataset for the comparative evaluation of mammography AI for a range of breast characteristics, lesion conspicuities and doses},
  author={Sizikova, Elena and Saharkhiz, Niloufar and Sharma, Diksha and Lago, Miguel and Sahiner, Berkman and Delfino, Jana G. and Badano, Aldo},
  journal={Advances in Neural Information Processing Systems},
  volume={},
  pages={16764--16778},
  year={2023}
}
"""

_DESCRIPTION = """\
M-SYNTH is a synthetic digital mammography (DM) dataset with four breast fibroglandular density distributions, imaged using Monte Carlo x-ray simulations with the publicly available Virtual Imaging Clinical Trial for Regulatory Evaluation (VICTRE) toolkit.
Curated by: Elena Sizikova, Niloufar Saharkhiz, Diksha Sharma, Miguel Lago, Berkman Sahiner, Jana Gut Delfino, Aldo Badano
License: Creative Commons 1.0 Universal License (CC0)
"""

_HOMEPAGE = "https://huggingface.co/datasets/didsr/msynth"

_REPO = "https://huggingface.co/datasets/didsr/msynth/resolve/main/data"

_LESIONDENSITY = ["1.0", "1.06", "1.1"]
_DOSE = ["20%", "40%", "60%", "80%", "100%"]
_DENSITY = ["fatty", "dense", "hetero", "scattered"]
_SIZE = ["5.0", "7.0", "9.0"]
_DETECTOR = "SIM"

# Maps breast density and relative dose level to the dose identifier used in
# the download paths.
_DOSETABLE = {
    "dense": {
        "20%": "1.73e09",
        "40%": "3.47e09",
        "60%": "5.20e09",
        "80%": "6.94e09",
        "100%": "8.67e09",
    },
    "hetero": {
        "20%": "2.04e09",
        "40%": "4.08e09",
        "60%": "6.12e09",
        "80%": "8.16e09",
        "100%": "1.02e10",
    },
    "scattered": {
        "20%": "4.08e09",
        "40%": "8.16e09",
        "60%": "1.22e10",
        "80%": "1.63e10",
        "100%": "2.04e10",
    },
    "fatty": {
        "20%": "4.44e09",
        "40%": "8.88e09",
        "60%": "1.33e10",
        "80%": "1.78e10",
        "100%": "2.22e10",
    },
}

_URLS = {
    "meta_data": f"{_REPO}/metadata/bounds.zip",
    "read_me": f"{_REPO}/README.md",
}
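
# For reference, a device-data archive URL assembled in `_split_generators`
# below has the following form (illustrative example: lesion density 1.0,
# 20% dose on a "dense" phantom, lesion size 5.0):
#
#   https://huggingface.co/datasets/didsr/msynth/resolve/main/data/device_data_VICTREPhantoms_spic_1.0/1.73e09/dense/2/5.0/SIM.zip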

# Subdirectory (inside each extracted archive) that holds the files for a given config.
DATA_DIR = {"all_data": "SIM", "seg": "SIM", "info": "bounds"}


class msynthConfig(datasets.BuilderConfig):
    """BuilderConfig for the msynth dataset."""

    lesion_density = _LESIONDENSITY
    dose = _DOSE
    density = _DENSITY
    size = _SIZE

    def __init__(self, name, **kwargs):
        super().__init__(
            version=datasets.Version("1.0.0"),
            name=name,
            description="msynth",
            **kwargs,
        )
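
# Hypothetical sketch of narrowing the download to a subset of the dataset.
# Because the parameter lists above are class attributes, they can be
# overridden before the builder runs when this module is imported directly:
#
#   msynthConfig.lesion_density = ["1.0"]
#   msynthConfig.dose = ["20%"]
#   msynthConfig.density = ["fatty"]
#   msynthConfig.size = ["5.0"]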


class msynth(datasets.GeneratorBasedBuilder):
    """msynth dataset builder."""

    DEFAULT_WRITER_BATCH_SIZE = 256
    BUILDER_CONFIGS = [
        msynthConfig("device_data"),
        msynthConfig("segmentation_mask"),
        msynthConfig("metadata"),
    ]

    def _info(self):
        if self.config.name == "device_data":
            features = datasets.Features(
                {
                    "Raw": datasets.Value("string"),
                    "mhd": datasets.Value("string"),
                    "loc": datasets.Value("string"),
                    "dcm": datasets.Value("string"),
                    "density": datasets.Value("string"),
                    "mass_radius": datasets.Value("float32"),
                }
            )
        elif self.config.name == "segmentation_mask":
            features = datasets.Features(
                {
                    "Raw": datasets.Value("string"),
                    "mhd": datasets.Value("string"),
                    "loc": datasets.Value("string"),
                    "density": datasets.Value("string"),
                    "mass_radius": datasets.Value("string"),
                }
            )
        elif self.config.name == "metadata":
            features = datasets.Features(
                {
                    "fatty": datasets.Value("string"),
                    "dense": datasets.Value("string"),
                    "hetero": datasets.Value("string"),
                    "scattered": datasets.Value("string"),
                }
            )

        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=features,
            supervised_keys=None,
            homepage=_HOMEPAGE,
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager: datasets.DownloadManager):
        # Allow "all" as a shorthand for the full parameter range.
        if self.config.lesion_density == "all":
            self.config.lesion_density = _LESIONDENSITY
        if self.config.dose == "all":
            self.config.dose = _DOSE
        if self.config.density == "all":
            self.config.density = _DENSITY
        if self.config.size == "all":
            self.config.size = _SIZE

        if self.config.name == "device_data":
            # Build one archive URL per (lesion density, dose, breast density, size).
            file_name = []
            for ld in self.config.lesion_density:
                for ds in self.config.dose:
                    for den in self.config.density:
                        value = _DOSETABLE[den][ds]
                        for sz in self.config.size:
                            temp_name = (
                                "device_data_VICTREPhantoms_spic_"
                                + ld
                                + "/"
                                + value
                                + "/"
                                + den
                                + "/2/"
                                + sz
                                + "/"
                                + _DETECTOR
                                + ".zip"
                            )
                            file_name.append(_REPO + "/" + temp_name)

            data_dir = []
            for url in file_name:
                try:
                    data_dir.append(dl_manager.download_and_extract(url))
                except Exception as e:
                    logger.error(f"Failed to download {url}: {e}")

            return [
                datasets.SplitGenerator(
                    name="device_data",
                    gen_kwargs={
                        "files": data_dir,
                        "name": "all_data",
                    },
                ),
            ]

        elif self.config.name == "segmentation_mask":
            # Mask archives are organized by breast density and lesion size only.
            seg_file_name = []
            for den in self.config.density:
                for sz in self.config.size:
                    temp_name = (
                        "segmentation_masks"
                        + "/"
                        + den
                        + "/2/"
                        + sz
                        + "/"
                        + _DETECTOR
                        + ".zip"
                    )
                    seg_file_name.append(_REPO + "/" + temp_name)

            seg_dir = []
            for url in seg_file_name:
                try:
                    seg_dir.append(dl_manager.download_and_extract(url))
                except Exception as e:
                    logger.error(f"Failed to download {url}: {e}")

            return [
                datasets.SplitGenerator(
                    name="segmentation_mask",
                    gen_kwargs={
                        "files": seg_dir,
                        "name": "seg",
                    },
                ),
            ]

        elif self.config.name == "metadata":
            meta_dir = dl_manager.download_and_extract(_URLS["meta_data"])
            return [
                datasets.SplitGenerator(
                    name="metadata",
                    gen_kwargs={
                        "files": meta_dir,
                        "name": "info",
                    },
                ),
            ]

    def get_all_file_paths(self, root_directory):
        """Recursively collect all .raw file paths under root_directory."""
        file_paths = []
        for folder, _, files in os.walk(root_directory):
            for file in files:
                if file.endswith(".raw"):
                    file_paths.append(os.path.join(folder, file))
        return file_paths

    def get_support_file_path(self, raw_file_path, ext):
        """Return the path of the companion file (.mhd, .loc or .dcm) for a .raw image."""
        folder_path = os.path.dirname(raw_file_path)
        raw_file_name = os.path.basename(raw_file_path)
        root, _ = os.path.splitext(raw_file_name)
        if ext == "dcm":
            # DICOM files are stored under a DICOM_dm subfolder with a fixed name.
            supp_file_name = f"000.{ext}"
            file_path = os.path.join(folder_path, "DICOM_dm", supp_file_name)
        else:
            supp_file_name = f"{root}.{ext}"
            file_path = os.path.join(folder_path, supp_file_name)

        if os.path.isfile(file_path):
            return file_path
        return "Not available for this raw file"

    def sort_file_paths(self, file_paths):
        """Sort file paths by the numeric index embedded in each path."""
        if not file_paths:
            return file_paths

        digit_numbers = []
        for file_path in file_paths:
            for word in _DENSITY:
                if word in file_path:
                    if self.config.name == "device_data":
                        pattern = rf"{word}.(\d+\.)(\d+)"
                    elif self.config.name == "segmentation_mask":
                        pattern = rf"{word}.(\d+)"
                    match = re.search(pattern, file_path)
                    if self.config.name == "device_data":
                        digit_numbers.append(int(match.group(2)))
                    elif self.config.name == "segmentation_mask":
                        digit_numbers.append(int(match.group(1)))
                    break

        # Reorder the paths according to the extracted indices.
        sorted_numbers_with_indices = sorted(enumerate(digit_numbers), key=lambda x: x[1])
        sorted_indices, _ = zip(*sorted_numbers_with_indices)
        return [file_paths[i] for i in sorted_indices]

    def _generate_examples(self, files, name):
        if self.config.name == "device_data":
            key = 0
            data_dir = []
            for folder in files:
                data_dir += self.get_all_file_paths(os.path.join(folder, DATA_DIR[name]))

            data_dir = self.sort_file_paths(data_dir)
            for path in data_dir:
                res_dic = {}
                # Extract the breast density and lesion radius from the path.
                for word in _DENSITY:
                    if word in path:
                        breast_density = word
                        pattern = rf"(\d+\.\d+)_{word}"
                        match = re.search(pattern, path)
                        matched_text = match.group(1)
                        break

                res_dic["Raw"] = path
                res_dic["mhd"] = self.get_support_file_path(path, "mhd")
                res_dic["loc"] = self.get_support_file_path(path, "loc")
                res_dic["dcm"] = self.get_support_file_path(path, "dcm")
                res_dic["density"] = breast_density
                # The feature schema declares mass_radius as float32.
                res_dic["mass_radius"] = float(matched_text)

                yield key, res_dic
                key += 1

        if self.config.name == "segmentation_mask":
            key = 0
            data_dir = []
            examples = []
            for folder in files:
                data_dir += self.get_all_file_paths(os.path.join(folder, DATA_DIR[name]))

            data_dir = self.sort_file_paths(data_dir)

            # Interleave each mask path with a "None" placeholder row
            # (plus one trailing placeholder).
            new_data_dir = []
            count = 1
            loc = 0
            while loc < len(data_dir):
                if count % 2 == 1:
                    new_data_dir.append(data_dir[loc])
                    loc += 1
                else:
                    new_data_dir.append("None")
                count += 1
            new_data_dir.append("None")

            for path in new_data_dir:
                res_dic = {}
                if path == "None":
                    res_dic["Raw"] = "None"
                    res_dic["mhd"] = "None"
                    res_dic["loc"] = "None"
                    res_dic["density"] = "None"
                    res_dic["mass_radius"] = "None"
                else:
                    # Extract the breast density and lesion radius from the path.
                    for word in _DENSITY:
                        if word in path:
                            breast_density = word
                            pattern = rf"(\d+\.\d+)_{word}"
                            match = re.search(pattern, path)
                            matched_text = match.group(1)
                            break

                    res_dic["Raw"] = path
                    res_dic["mhd"] = self.get_support_file_path(path, "mhd")
                    res_dic["loc"] = self.get_support_file_path(path, "loc")
                    res_dic["density"] = breast_density
                    res_dic["mass_radius"] = matched_text

                examples.append(res_dic)

            for example in examples:
                yield key, example
                key += 1

        if self.config.name == "metadata":
            key = 0
            meta_dir = os.path.join(files, DATA_DIR[name])
            res_dic = {
                "fatty": os.path.join(meta_dir, "bounds_fatty.npy"),
                "dense": os.path.join(meta_dir, "bounds_dense.npy"),
                "hetero": os.path.join(meta_dir, "bounds_hetero.npy"),
                "scattered": os.path.join(meta_dir, "bounds_scattered.npy"),
            }
            yield key, res_dic
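

# Illustrative sketch of consuming the "metadata" split produced above: each
# column holds the path to a NumPy bounds file. Here `example` stands for one
# row of that split, and `allow_pickle=True` is an assumption about how the
# .npy files were saved, not something this script guarantees.
#
#   import numpy as np
#   bounds = np.load(example["fatty"], allow_pickle=True)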