|
|
|
import datasets |
|
from huggingface_hub import HfApi |
|
from datasets import DownloadManager, DatasetInfo |
|
from datasets.data_files import DataFilesDict |
|
import os |
|
import json |
|
|
|
|
|
|
|
|
|
|
|
# Hub repository id of the dataset this loading script reads from.
_NAME = "mickylan2367/spectrogram_musicCaps"

# File extensions of the image payload shipped in the repo.
_EXTENSION = [".png"]

# Git revision of the Hub repo to resolve files against.
_REVISION = "main"


# Dataset card / homepage shown in the generated DatasetInfo.
_HOMEPAGE = "https://huggingface.co/datasets/mickylan2367/spectrogram_musicCaps"


# Human-readable description embedded into DatasetInfo (interpolates _NAME).
_DESCRIPTION = f"""\

{_NAME} Datasets including spectrogram.png file from Google MusicCaps Datasets!

Using for Project Learning...

"""


# NOTE(review): not referenced anywhere in this script as shown — possibly a
# leftover from an earlier layout; confirm before removing.
_IMAGES_DIR = "mickylan2367/images/data/"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class spectrogram_musicCapsConfig(datasets.BuilderConfig):
    """Configuration for the spectrogram_MusicCaps dataset builder.

    Carries the per-split metadata file URLs alongside the standard
    ``datasets.BuilderConfig`` fields (name, description, ...).
    """

    def __init__(self, metadata_urls, **kwargs):
        """Create a builder config.

        Args:
            metadata_urls: mapping of split name (e.g. ``"train"``) to the
                URL of that split's JSON-Lines metadata file.
            **kwargs: keyword arguments forwarded to ``BuilderConfig``.
        """
        # Pin the config version; everything else is passed through.
        super().__init__(version=datasets.Version("1.0.0"), **kwargs)
        self.metadata_urls = metadata_urls
|
|
|
class spectrogram_musicCaps(datasets.GeneratorBasedBuilder):
    """Builder yielding (image, caption) examples from spectrogram PNG archives
    hosted in the Hub repo, paired with JSON-Lines metadata files."""

    BUILDER_CONFIGS = [
        spectrogram_musicCapsConfig(
            name="MusicCaps data 0_10",
            description="Datasets from MusicCaps by Mikan",
            # Use /resolve/ (raw file download) instead of /blob/ (HTML page);
            # with /blob/ the downloaded file is HTML and json.loads() fails.
            metadata_urls={
                "train": "https://huggingface.co/datasets/mickylan2367/spectrogram_musicCaps/resolve/main/data/metadata0_10.jsonl"
            },
        ),
        spectrogram_musicCapsConfig(
            # (sic) "MusicCpas" kept as-is: the config name is user-facing API.
            name="MusicCpas data 10_100",
            description="Datasets second action by Mikan",
            metadata_urls={
                "train": "https://huggingface.co/datasets/mickylan2367/spectrogram_musicCaps/resolve/main/data/metadata10_200.jsonl"
            },
        ),
    ]

    def _info(self):
        """Return DatasetInfo: one PNG image plus a free-text caption."""
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    "image": datasets.Image(),
                    "caption": datasets.Value("string"),
                }
            ),
            supervised_keys=("image", "caption"),
            homepage=_HOMEPAGE,
        )

    def _split_generators(self, dl_manager: DownloadManager):
        """Download metadata files and the repo's zip archives, one
        SplitGenerator per discovered split (currently only TRAIN)."""
        hfh_dataset_info = HfApi().dataset_info(_NAME, revision=_REVISION, timeout=100.0)

        # {split_name: local_path} for the JSONL metadata files.
        split_metadata_paths = dl_manager.download(self.config.metadata_urls)

        # Every .zip archive in the repo is treated as TRAIN data.
        data_files = DataFilesDict.from_hf_repo(
            {datasets.Split.TRAIN: ["**"]},
            dataset_info=hfh_dataset_info,
            allowed_extensions=["zip", ".zip"],
        )

        gs = []
        for split, files in data_files.items():
            # Each entry comes back as a local directory of extracted files.
            downloaded_files = dl_manager.download_and_extract(files)
            gs.append(
                datasets.SplitGenerator(
                    name=split,
                    gen_kwargs={
                        "images": downloaded_files,
                        # NOTE: only a "train" metadata file exists per config.
                        "metadata_path": split_metadata_paths["train"],
                    },
                )
            )
        return gs

    def _generate_examples(self, images, metadata_path):
        """Yield (key, example) pairs for one split.

        Args:
            images: local paths of the downloaded-and-extracted archives.
            metadata_path: local path of the JSON-Lines metadata file whose
                records carry "file_name" and "caption" fields.
        """
        # Map image basenames to their actual on-disk locations inside the
        # extracted archives; the metadata's "file_name" values are relative
        # and would not resolve on their own.
        image_paths = {}
        for extracted in images:
            if os.path.isdir(extracted):
                for root, _, files in os.walk(extracted):
                    for fname in files:
                        image_paths[fname] = os.path.join(root, fname)
            else:
                image_paths[os.path.basename(extracted)] = extracted

        with open(metadata_path, encoding="utf-8") as fin:
            for line in fin:
                data = json.loads(line)
                file_name = data["file_name"]
                yield file_name, {
                    # Fall back to the raw metadata value if the image was
                    # not found among the extracted archives.
                    "image": image_paths.get(os.path.basename(file_name), file_name),
                    "caption": data["caption"],
                }