import gzip
import json
import os

import datasets
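
# DatasetInfo metadata (left as empty placeholders in this script) and the path
# template for one gzipped JSON-lines shard per (language, scope) pair.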
_DESCRIPTION = """ """
_HOMEPAGE_URL = ""
_CITATION = """"""

_VERSION = "0.1"
_BASE_NAME = ""
_BASE_URL = "data/data.{}.{}.json.gz"

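# Supported language codes, the two data scopes ("faq" and "cqa"), and the three
# granularities at which examples can be emitted.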
_LANGUAGES = [
    "ca", "en", "de", "es", "fr",
    "ru", "ja", "it", "zh", "pt",
    "nl", "tr", "pl", "vi", "ar",
    "id", "uk", "ro", "no", "th",
    "sv", "el", "fi", "he", "da",
    "cs", "ko", "fa", "hi", "hu",
    "sk", "lt", "et", "hr", "is",
    "lv", "ms", "bg", "sr",
]
_SCOPES = ["faq", "cqa"]
_LEVELS = ["domain", "page", "question"]

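# BuilderConfig whose name encodes the (language, scope, level) triple,
# e.g. "en-faq-question".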
class MQAConfig(datasets.BuilderConfig):
    def __init__(self, *args, language="en", scope="all", level="question", **kwargs):
        super().__init__(
            *args,
            name=f"{language}-{scope}-{level}",
            **kwargs,
        )
        self.language = language
        self.scope = scope
        self.level = level

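# Builder exposing one config per (language, scope, level) combination, plus
# "all"-language and "all"-scope variants.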
class MQA(datasets.GeneratorBasedBuilder):
    BUILDER_CONFIGS = []
    for language in _LANGUAGES:
        for scope in _SCOPES:
            for level in _LEVELS:
                BUILDER_CONFIGS.append(MQAConfig(language=language, scope=scope, level=level))
    # Also expose "all"-scope and "all"-language variants, one per level.
    for language in _LANGUAGES:
        for level in _LEVELS:
            BUILDER_CONFIGS.append(MQAConfig(language=language, scope="all", level=level))
    for scope in _SCOPES:
        for level in _LEVELS:
            BUILDER_CONFIGS.append(MQAConfig(language="all", scope=scope, level=level))
    BUILDER_CONFIG_CLASS = MQAConfig

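    # Feature schemas nest: answers inside a question, questions inside a page,
    # pages inside a domain. The config's "level" selects which of the three is
    # the top-level example.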
    def _info(self):
        question = {
            "id": datasets.Value("string"),
            "text": datasets.Value("string"),
            "name": datasets.Value("string"),
            "domain": datasets.Value("string"),
            "bucket": datasets.Value("string"),
            "answers": [{
                "text": datasets.Value("string"),
                "name": datasets.Value("string"),
                "is_accepted": datasets.Value("bool"),
            }],
        }
        page = {
            "id": datasets.Value("string"),
            "bucket": datasets.Value("string"),
            "domain": datasets.Value("string"),
            "questions": [question],
        }
        domain = {
            "domain": datasets.Value("string"),
            "pages": [page],
        }
        if self.config.level == "question":
            features = question
        elif self.config.level == "page":
            features = page
        elif self.config.level == "domain":
            features = domain
        else:
            raise NotImplementedError()
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(features),
            supervised_keys=None,
            homepage=_HOMEPAGE_URL,
            citation=_CITATION,
        )

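    # A single TRAIN split; one shard is downloaded per requested (language, scope)
    # pair, with "all" expanding to every available value.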
    def _split_generators(self, dl_manager):
        filenames = []
        languages = _LANGUAGES if self.config.language == "all" else [self.config.language]
        scopes = _SCOPES if self.config.scope == "all" else [self.config.scope]
        for language in languages:
            for scope in scopes:
                path = dl_manager.download_and_extract(_BASE_URL.format(language, scope))
                filenames.append(path)
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={"filenames": filenames},
            )
        ]

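    # Each shard is a JSON-lines file with one page per line (already decompressed
    # by download_and_extract). Depending on the configured level, yield individual
    # questions, whole pages, or pages grouped by domain.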
    def _generate_examples(self, filenames):
        def default(e, key, default_value=""):
            # Fields that are present but null become empty strings so they fit the
            # string-typed schema.
            if e[key] is None:
                return default_value
            return e[key]

        for filename in filenames:
            with open(filename, "r") as f:
                domain = []
                previous_domain = ""
                for line in f:
                    page = json.loads(line)
                    questions = [{
                        "text": default(question, "text"),
                        "name": default(question, "name"),
                        "domain": page["domain"],
                        "bucket": page["bucket"],
                        "id": question["hash"],
                        "answers": [{
                            "text": default(answer, "text"),
                            "name": default(answer, "name"),
                            "is_accepted": answer["is_accepted"],
                        } for answer in question["answers"]],
                    } for question in page["questions"]]
                    page = {
                        "id": page["page_hash"],
                        "domain": page["domain"],
                        "bucket": page["bucket"],
                        "questions": questions,
                    }
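                    # Emit examples at the requested granularity.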
                    if self.config.level == "question":
                        for question in questions:
                            yield question["id"], question
                    if self.config.level == "page":
                        yield page["id"], page
                    if self.config.level == "domain":
                        # Pages arrive grouped by domain: keep collecting until a page
                        # from a different domain shows up, then yield the finished group.
                        if page["domain"] == previous_domain or previous_domain == "":
                            domain.append(page)
                        else:
                            yield previous_domain, {
                                "domain": previous_domain,
                                "pages": domain,
                            }
                            # Start the next group with the page that triggered the
                            # flush so it is not dropped.
                            domain = [page]
                        previous_domain = page["domain"]