# coding=utf-8
# Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Set14 dataset: An evaluation dataset for the image super resolution task"""
import datasets
from pathlib import Path
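
# A minimal usage sketch (an illustration, not part of the loading script itself;
# it assumes this script is hosted in the `eugenesiow/Set14` dataset repository
# referenced by _DL_URL below, and uses the config names defined in _DATA_OPTIONS):
#
#   from datasets import load_dataset
#
#   set14 = load_dataset('eugenesiow/Set14', 'bicubic_x2', split='validation')
#   for example in set14:
#       example['lr']  # local path to a low-resolution image file
#       example['hr']  # local path to the matching high-resolution image file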
_CITATION = """
@inproceedings{zeyde2010single,
title={On single image scale-up using sparse-representations},
author={Zeyde, Roman and Elad, Michael and Protter, Matan},
booktitle={International conference on curves and surfaces},
pages={711--730},
year={2010},
organization={Springer}
}
"""
_DESCRIPTION = """
Set14 is an evaluation dataset with 14 RGB images for the image super resolution task.
"""
_HOMEPAGE = "https://sites.google.com/site/romanzeyde/research-interests"
_LICENSE = "UNK"
_DL_URL = "https://huggingface.co/datasets/eugenesiow/Set14/resolve/main/data/"
_DEFAULT_CONFIG = "bicubic_x2"
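# Each configuration pairs the shared high-resolution archive with the low-resolution
# archive for one bicubic downscaling factor (x2, x3, x4).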
_DATA_OPTIONS = {
"bicubic_x2": {
"hr": _DL_URL + "Set14_HR.tar.gz",
"lr": _DL_URL + "Set14_LR_x2.tar.gz",
},
"bicubic_x3": {
"hr": _DL_URL + "Set14_HR.tar.gz",
"lr": _DL_URL + "Set14_LR_x3.tar.gz",
},
"bicubic_x4": {
"hr": _DL_URL + "Set14_HR.tar.gz",
"lr": _DL_URL + "Set14_LR_x4.tar.gz",
}
}
class Set14Config(datasets.BuilderConfig):
"""BuilderConfig for Set14."""
def __init__(
self,
name,
hr_url,
lr_url,
**kwargs,
):
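        """BuilderConfig for Set14.

        Args:
            name: configuration name; must be a key of `_DATA_OPTIONS`.
            hr_url: URL of the high-resolution image archive.
            lr_url: URL of the corresponding low-resolution image archive.
            **kwargs: keyword arguments forwarded to `datasets.BuilderConfig`.
        """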
if name not in _DATA_OPTIONS:
            raise ValueError("name must be one of %s" % list(_DATA_OPTIONS))
super(Set14Config, self).__init__(name=name, version=datasets.Version("1.0.0"), **kwargs)
self.hr_url = hr_url
self.lr_url = lr_url
class Set14(datasets.GeneratorBasedBuilder):
"""Set14 dataset for single image super resolution evaluation."""
BUILDER_CONFIGS = [
Set14Config(
name=key,
hr_url=values['hr'],
lr_url=values['lr']
) for key, values in _DATA_OPTIONS.items()
]
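    # Configuration used when `load_dataset` is called without an explicit config name.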
DEFAULT_CONFIG_NAME = _DEFAULT_CONFIG
def _info(self):
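        # Examples expose local file paths (as strings) to the paired HR and LR images
        # rather than decoded image data.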
features = datasets.Features(
{
"hr": datasets.Value("string"),
"lr": datasets.Value("string"),
}
)
return datasets.DatasetInfo(
description=_DESCRIPTION,
features=features,
supervised_keys=None,
homepage=_HOMEPAGE,
license=_LICENSE,
citation=_CITATION,
)
def _split_generators(self, dl_manager):
"""Returns SplitGenerators."""
hr_data_dir = dl_manager.download_and_extract(self.config.hr_url)
lr_data_dir = dl_manager.download_and_extract(self.config.lr_url)
return [
datasets.SplitGenerator(
name=datasets.Split.VALIDATION,
# These kwargs will be passed to _generate_examples
gen_kwargs={
"lr_path": lr_data_dir,
"hr_path": str(Path(hr_data_dir) / 'Set14_HR')
},
)
]
def _generate_examples(
self, hr_path, lr_path
):
""" Yields examples as (key, example) tuples. """
# This method handles input defined in _split_generators to yield (key, example) tuples from the dataset.
# The `key` is here for legacy reason (tfds) and is not important in itself.
extensions = {'.jpg', '.jpeg', '.png'}
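        # Walk the extracted LR directory and pair each LR image with the HR image
        # of the same filename under hr_path.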
for file_path in sorted(Path(lr_path).glob("**/*")):
if file_path.suffix in extensions:
                file_path_str = file_path.as_posix()
                yield file_path_str, {
                    'lr': file_path_str,
                    'hr': (Path(hr_path) / file_path.name).as_posix()
                }