#
# This file is part of the oe_dataset distribution (https://huggingface.co/datasets/ABC-iRobotics/oe_dataset).
# Copyright (c) 2023 ABC-iRobotics.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
"""OE dataset"""

import sys

if sys.version_info < (3, 9):
    from typing import Sequence, Generator, Tuple
else:
    from collections.abc import Sequence, Generator
    Tuple = tuple
from typing import Optional, IO

import itertools

import datasets


# ---- Constants ----

_CITATION = """\
@ARTICLE{10145828,
  author={Károly, Artúr István and Tirczka, Sebestyén and Gao, Huijun and Rudas, Imre J. and Galambos, Péter},
  journal={IEEE Transactions on Cybernetics},
  title={Increasing the Robustness of Deep Learning Models for Object Segmentation: A Framework for Blending Automatically Annotated Real and Synthetic Data},
  year={2023},
  volume={},
  number={},
  pages={1-14},
  doi={10.1109/TCYB.2023.3276485}}
"""

_DESCRIPTION = """\
An instance segmentation dataset for robotic manipulation in a tabletop environment.
The dataset incorporates real and synthetic images for testing sim-to-real model transfer after fine-tuning.
"""

_HOMEPAGE = "https://huggingface.co/ABC-iRobotics/oe_dataset"

_LICENSE = "GNU General Public License v3.0"

_LATEST_VERSIONS = {
    "all": "1.0.0",
    "real": "1.0.0",
    "synthetic": "1.0.0",
    "photoreal": "1.0.0",
    "random": "1.0.0",
}


# ---- OE dataset Configs ----

class OEDatasetConfig(datasets.BuilderConfig):
    """BuilderConfig for OE dataset."""

    def __init__(
        self,
        name: str,
        imgs_urls: Sequence[str],
        masks_urls: Sequence[str],
        version: Optional[str] = None,
        **kwargs,
    ):
        _version = _LATEST_VERSIONS[name] if version is None else version
        _name = f"{name}_v{_version}"
        super(OEDatasetConfig, self).__init__(version=datasets.Version(_version), name=_name, **kwargs)
        # Each base URL points to a directory that holds one train.tar.gz and one val.tar.gz archive.
        self._imgs_urls = {
            "train": [url + "/train.tar.gz" for url in imgs_urls],
            "val": [url + "/val.tar.gz" for url in imgs_urls],
        }
        self._masks_urls = {
            "train": [url + "/train.tar.gz" for url in masks_urls],
            "val": [url + "/val.tar.gz" for url in masks_urls],
        }

    @property
    def features(self):
        return datasets.Features(
            {
                "image": datasets.Image(),
                "mask": datasets.Image(),
            }
        )

    @property
    def supervised_keys(self):
        return ("image", "mask")


# ---- OE dataset Loader ----

class OEDataset(datasets.GeneratorBasedBuilder):
    """OE dataset."""

    BUILDER_CONFIG_CLASS = OEDatasetConfig
    BUILDER_CONFIGS = [
        OEDatasetConfig(
            name="photoreal",
            description="Photorealistic synthetic images",
            imgs_urls=[
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/synthetic/photoreal/imgs",
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/synthetic/photoreal/imgs2",
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/synthetic/photoreal/imgs3",
            ],
            masks_urls=[
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/synthetic/photoreal/masks",
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/synthetic/photoreal/masks2",
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/synthetic/photoreal/masks3",
            ],
        ),
        OEDatasetConfig(
            name="random",
            description="Domain randomized synthetic images",
            imgs_urls=[
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/synthetic/random/imgs",
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/synthetic/random/imgs2",
            ],
            masks_urls=[
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/synthetic/random/masks",
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/synthetic/random/masks2",
            ],
        ),
        OEDatasetConfig(
            name="real",
            description="Real images",
            imgs_urls=[
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/real/imgs",
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/real/imgs2",
            ],
            masks_urls=[
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/real/masks",
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/real/masks2",
            ],
        ),
        OEDatasetConfig(
            name="synthetic",
            description="Synthetic images",
            imgs_urls=[
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/synthetic/photoreal/imgs",
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/synthetic/photoreal/imgs2",
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/synthetic/photoreal/imgs3",
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/synthetic/random/imgs",
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/synthetic/random/imgs2",
            ],
            masks_urls=[
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/synthetic/photoreal/masks",
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/synthetic/photoreal/masks2",
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/synthetic/photoreal/masks3",
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/synthetic/random/masks",
                "https://huggingface.co/datasets/ABC-iRobotics/oe_dataset/resolve/main/synthetic/random/masks2",
            ],
        ),
    ]

    DEFAULT_WRITER_BATCH_SIZE = 10

    def _info(self):
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=self.config.features,
            supervised_keys=self.config.supervised_keys,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
            version=self.config.version,
        )

    def _split_generators(self, dl_manager):
        # Download the per-split image and mask archives for this configuration.
        train_imgs_paths = dl_manager.download(self.config._imgs_urls["train"])
        val_imgs_paths = dl_manager.download(self.config._imgs_urls["val"])
        train_masks_paths = dl_manager.download(self.config._masks_urls["train"])
        val_masks_paths = dl_manager.download(self.config._masks_urls["val"])

        # Chain the archive iterators so multiple archives per split behave as a single stream.
        train_imgs_gen = itertools.chain.from_iterable([dl_manager.iter_archive(path) for path in train_imgs_paths])
        val_imgs_gen = itertools.chain.from_iterable([dl_manager.iter_archive(path) for path in val_imgs_paths])
        train_masks_gen = itertools.chain.from_iterable([dl_manager.iter_archive(path) for path in train_masks_paths])
        val_masks_gen = itertools.chain.from_iterable([dl_manager.iter_archive(path) for path in val_masks_paths])

        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={
                    "images": train_imgs_gen,
                    "masks": train_masks_gen,
                },
            ),
            datasets.SplitGenerator(
                name=datasets.Split.VALIDATION,
                gen_kwargs={
                    "images": val_imgs_gen,
                    "masks": val_masks_gen,
                },
            ),
        ]

    def _generate_examples(
        self,
        images: Generator[Tuple[str, IO], None, None],
        masks: Generator[Tuple[str, IO], None, None],
    ):
        # The image and mask archives store files in the same order, so pair them positionally.
        for i, (img_info, mask_info) in enumerate(zip(images, masks)):
            img_file_path, img_file_obj = img_info
            mask_file_path, mask_file_obj = mask_info
            yield i, {
                "image": {"path": img_file_path, "bytes": img_file_obj.read()},
                "mask": {"path": mask_file_path, "bytes": mask_file_obj.read()},
            }
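

# ---- Usage sketch (illustrative, not part of the loader) ----
# A minimal sketch of how this script might be exercised, assuming it is hosted in the
# dataset repository above and that the installed `datasets` release still supports
# script-based datasets (newer releases require `trust_remote_code=True`). Config names
# are built by OEDatasetConfig as "<name>_v<version>", e.g. "real_v1.0.0".
if __name__ == "__main__":
    ds = datasets.load_dataset(
        "ABC-iRobotics/oe_dataset",
        "real_v1.0.0",            # assumed config name; also e.g. "synthetic_v1.0.0", "photoreal_v1.0.0"
        split="train",
        trust_remote_code=True,   # required by recent `datasets` versions to run this loading script
    )
    example = ds[0]
    # The Image() features decode to PIL images on access.
    print(example["image"].size, example["mask"].size)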