import os
import posixpath

import datasets
from datasets import DatasetInfo, DownloadManager
from fsspec.core import url_to_fs


_EXTENSION = [".png", ".jpg", ".jpeg", ".webp", ".bmp"]


class DanbooruDataset(datasets.GeneratorBasedBuilder):
    """Images read from the tar archives in this repository, keyed by Danbooru post id."""

    BUILDER_CONFIGS = [
        datasets.BuilderConfig(name="full"),
    ]

    def _info(self) -> DatasetInfo:
        features = {
            "image": datasets.Image(),
            "post_id": datasets.Value("int64"),
        }
        info = datasets.DatasetInfo(
            features=datasets.Features(features),
            supervised_keys=None,
            citation="",
        )
        return info

    def _split_generators(self, dl_manager: DownloadManager):
        # If the builder is resolved from the Hub, dl_manager._base_path may be
        # an HTTP(S) URL under HF_ENDPOINT; rewrite it to an hf:// URL so the
        # repository can be globbed through fsspec.
        base_path = dl_manager._base_path
        if base_path.startswith(datasets.config.HF_ENDPOINT):
            base_path = base_path[len(datasets.config.HF_ENDPOINT):].replace("/resolve", "", 1)
            base_path = "hf://" + base_path.lstrip("/")
        fs, path = url_to_fs(base_path)
        # Collect every tar archive in the repository, download (or stream) it,
        # and hand the opened archives to _generate_examples.
        urls = fs.glob(posixpath.join(path, "**/*.tar"), detail=False)
        archives = dl_manager.download(["hf://" + url for url in urls])
        archives = [dl_manager.iter_archive(archive) for archive in archives]
        return [datasets.SplitGenerator(name="train", gen_kwargs={"archives": archives})]

    def _generate_examples(self, archives):
        for archive in archives:
            for path, f in archive:
                path_root, path_ext = os.path.splitext(path)
                # Skip archive members that are not images.
                if path_ext.lower() not in _EXTENSION:
                    continue
                # Filenames are expected to be "<post_id>.<ext>", so the stem
                # is the numeric post id.
                post_id = int(os.path.basename(path_root))
                yield path, {"image": {"bytes": f.read()}, "post_id": post_id}
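

# A minimal usage sketch (not executed as part of the loading script). The
# repository id below is a placeholder, and `trust_remote_code=True` is only
# needed on datasets versions that gate script-based datasets behind that flag:
#
#     from datasets import load_dataset
#
#     ds = load_dataset("user/danbooru", "full", split="train",
#                       streaming=True, trust_remote_code=True)
#     sample = next(iter(ds))
#     print(sample["post_id"])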