"""OE dataset""" |
|
import sys
import itertools
from typing import IO, Optional

if sys.version_info < (3, 9):
    from typing import Sequence, Generator, Tuple
else:
    from collections.abc import Sequence, Generator
    Tuple = tuple

import datasets
|
_CITATION = """\
@ARTICLE{10145828,
  author={Károly, Artúr István and Tirczka, Sebestyén and Gao, Huijun and Rudas, Imre J. and Galambos, Péter},
  journal={IEEE Transactions on Cybernetics},
  title={Increasing the Robustness of Deep Learning Models for Object Segmentation: A Framework for Blending Automatically Annotated Real and Synthetic Data},
  year={2023},
  volume={},
  number={},
  pages={1-14},
  doi={10.1109/TCYB.2023.3276485}}
"""
|
_DESCRIPTION = """\
An instance segmentation dataset for robotic manipulation in a tabletop environment.
The dataset incorporates real and synthetic images for testing sim-to-real model transfer after fine-tuning.
"""
|
_HOMEPAGE = "https://huggingface.co/ABC-iRobotics/oe_dataset"

_LICENSE = "GNU General Public License v3.0"

_LATEST_VERSIONS = {
    "all": "1.0.0",
    "real": "1.0.0",
    "synthetic": "1.0.0",
    "photoreal": "1.0.0",
    "random": "1.0.0",
}
|
|
class OEDatasetConfig(datasets.BuilderConfig):
    """BuilderConfig for the OE dataset."""

    def __init__(self, name: str, imgs_urls: Sequence[str], masks_urls: Sequence[str], version: Optional[str] = None, **kwargs):
        # Default to the latest known version of the requested variant.
        _version = _LATEST_VERSIONS[name] if version is None else version
        _name = f"{name}_v{_version}"
        super(OEDatasetConfig, self).__init__(version=datasets.Version(_version), name=_name, **kwargs)
        # Each base URL is expected to contain train.tar.gz and val.tar.gz archives.
        self._imgs_urls = {"train": [url + "/train.tar.gz" for url in imgs_urls], "val": [url + "/val.tar.gz" for url in imgs_urls]}
        self._masks_urls = {"train": [url + "/train.tar.gz" for url in masks_urls], "val": [url + "/val.tar.gz" for url in masks_urls]}
|
    @property
    def features(self):
        return datasets.Features(
            {
                "image": datasets.Image(),
                "mask": datasets.Image(),
            }
        )

    @property
    def supervised_keys(self):
        return ("image", "mask")
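# Illustration (a sketch with hypothetical arguments): OEDatasetConfig(name="photoreal",
# imgs_urls=["https://example.org/imgs"], masks_urls=["https://example.org/masks"])
# resolves to the builder config name "photoreal_v1.0.0" (via _LATEST_VERSIONS) and
# downloads "https://example.org/imgs/train.tar.gz", ".../imgs/val.tar.gz" and the
# corresponding mask archives for the train and validation splits.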
|
|
class OEDataset(datasets.GeneratorBasedBuilder):
    """OE dataset."""

    BUILDER_CONFIG_CLASS = OEDatasetConfig
    BUILDER_CONFIGS = [
        OEDatasetConfig(
            name="photoreal",
            description="Photorealistic synthetic images",
            imgs_urls=["https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/synthetic/photoreal/imgs"],
            masks_urls=["https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/synthetic/photoreal/masks"],
        ),
    ]
|
    def _info(self):
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=self.config.features,
            supervised_keys=self.config.supervised_keys,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
            version=self.config.version,
        )
|
    def _split_generators(self, dl_manager):
        # Download the train/val image and mask archives for every base URL in the config.
        train_imgs_paths = dl_manager.download(self.config._imgs_urls["train"])
        val_imgs_paths = dl_manager.download(self.config._imgs_urls["val"])

        train_masks_paths = dl_manager.download(self.config._masks_urls["train"])
        val_masks_paths = dl_manager.download(self.config._masks_urls["val"])

        # Chain the archives of each split into a single (path, file object) generator.
        train_imgs_gen = itertools.chain.from_iterable([dl_manager.iter_archive(path) for path in train_imgs_paths])
        val_imgs_gen = itertools.chain.from_iterable([dl_manager.iter_archive(path) for path in val_imgs_paths])

        train_masks_gen = itertools.chain.from_iterable([dl_manager.iter_archive(path) for path in train_masks_paths])
        val_masks_gen = itertools.chain.from_iterable([dl_manager.iter_archive(path) for path in val_masks_paths])

        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={
                    "images": train_imgs_gen,
                    "masks": train_masks_gen,
                },
            ),
            datasets.SplitGenerator(
                name=datasets.Split.VALIDATION,
                gen_kwargs={
                    "images": val_imgs_gen,
                    "masks": val_masks_gen,
                },
            ),
        ]
|
    def _generate_examples(
        self,
        images: Generator[Tuple[str, IO], None, None],
        masks: Generator[Tuple[str, IO], None, None],
    ):
        # The image and mask archives are assumed to list files in the same order,
        # so zipping the two generators pairs each image with its mask.
        for i, (img_info, mask_info) in enumerate(zip(images, masks)):
            img_file_path, img_file_obj = img_info
            mask_file_path, mask_file_obj = mask_info
            yield i, {
                "image": {"path": img_file_path, "bytes": img_file_obj.read()},
                "mask": {"path": mask_file_path, "bytes": mask_file_obj.read()},
            }
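# Example usage (a minimal sketch, not executed by this script): assuming the `datasets`
# library can resolve the Hub repository "ABC-iRobotics/oe_dataset" (or this script's local
# path) and that the "photoreal" config registers under the name "photoreal_v1.0.0" as
# constructed in OEDatasetConfig.__init__; recent `datasets` releases may additionally
# require trust_remote_code=True for script-based datasets.
#
#     import datasets
#
#     ds = datasets.load_dataset("ABC-iRobotics/oe_dataset", "photoreal_v1.0.0")
#     example = ds["train"][0]
#     image = example["image"]  # decoded by datasets.Image()
#     mask = example["mask"]    # decoded instance segmentation mask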