Dataset: Malyuk
Languages: Ukrainian
Size: 10B<n<100B

import json

import datasets

logger = datasets.logging.get_logger(__name__)

_DESCRIPTION = """\
Malyuk: a large, compiled corpus of Ukrainian-language texts.
113GB of text data in JSON Lines format.
A combination of UberText 2.0, the Ukrainian part of OSCAR, and Ukrainian News.
Nothing is guaranteed. Use at your own risk.
"""
_URLS = [
    "combined_ds_pt00.jsonlines",
    "combined_ds_pt01.jsonlines",
    "combined_ds_pt02.jsonlines",
    "combined_ds_pt03.jsonlines",
    "combined_ds_pt04.jsonlines",
    "combined_ds_pt05.jsonlines",
    "combined_ds_pt06.jsonlines",
    "combined_ds_pt07.jsonlines",
    "combined_ds_pt08.jsonlines",
    "combined_ds_pt09.jsonlines",
    "combined_ds_pt10.jsonlines",
    "combined_ds_pt11.jsonlines",
    "combined_ds_pt12.jsonlines",
    "combined_ds_pt13.jsonlines",
    "combined_ds_pt14.jsonlines",
    "combined_ds_pt15.jsonlines",
    "combined_ds_pt16.jsonlines",
    "combined_ds_pt17.jsonlines",
    "combined_ds_pt18.jsonlines",
    "combined_ds_pt19.jsonlines",
    "combined_ds_pt20.jsonlines",
    "combined_ds_pt21.jsonlines",
    "combined_ds_pt22.jsonlines",
]
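
# Note (added for clarity): the shard names in _URLS are relative paths, so
# dl_manager.download() is assumed to resolve them against the dataset
# repository that hosts this loading script, not against absolute URLs.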

class MalyukDataset(datasets.GeneratorBasedBuilder):
    """
    Malyuk Dataset

    ~^^^^^^:::^?7!!!!!~~~~~~~~~~~~~~^^^^^^::::::.............:::::::::^^:::::::^^^^^~~~~~~~~~~!:........
    ~^^^^^^:::^7!~!!!!!~~~~~~~^^^^^^^~~~~^^^^^^^::::::::::::^^^^^^^^^^:::::::::::^^~~~~~!!!~~!!:........
    ~~~^^^^:::^7!~!!!~~~~~~^^^^^^^^^~~~~~~~~~~~~~~~~~~~!!~~~~!~~~~~~~~~~~~~~~^^:::^^~~~~~~!!~!7^........
    ~~~^^^^^^:~7!!!~~!~~~~~~~~!!!!!!!!777777777777!!!~~!777???JJYY55YYJ?7?777777!~~~^~~~~~!77??^........
    !~~~^^^^^:~?!!!!!!!!!!7?Y55555555PBGPPPPPPPP5YJJ?777JY5PPPG########BBBBGGGGGP5Y?!~!~~~~77??:........
    7!~~^^^^^:^7!!!!!!!7?YPGBBGGGGGGGBB########BBGGPP5YY5GB####&&&&&##BGGGBB####BBG5?!!~~~~~!?7.........
    7!~~^~~~~~^!!!!!!!!?YPGGGP55PPGBB####&&&&&&##BBPJ7!~!75##&&&&&#BB###BBGGGB###BGPY?7~~~~^~7~:::^^^...
    ?7~~^JPJ7!7?~!!!77?J5PPPGBB#BGG&&@&#PG#&&&##BBGJ!^::::!5BBB###GY5#&&&#GG##BBBBGP5?7~~^^^^~~^~!?P5:..
    J7~~^JBGJ??7~!!!!7?JJ5PGGBBBG5J5GBG5Y5GBBGGGPPJ7!~::.:^!J555GPP5JY555JJ5GGGP55Y?!~~^^^^^^~!JY5B#?...
    J7!~~7PY?J??7!!!!!!7?JYYYYY5555555PPPP55YYYYYJ?!~~^:::^~!?JJJJY55YY555555Y?7!~~^^^^^^~~~^7Y55JYP~...
    J7!~~!J7?YYJ7!!!!!!!!777???JJY55555YYJJ????????7!~^:::^~!777!!!7??????77!~^^::::::^^^~!~~7PP5J7?~...
    J7!~~~!?Y5PG?7!!!!~~~~~~~~!!777777??777777????77!!~^::^!!777!~~^~~!!~~^:::...::::^^~~~!!!7GGPY!~:...
    ?7!!~~~7YPGGJ7!!!!!~~~~~~~~~~~~~~!!~~~~!77????77!!~^^:^~!77?7!~^^::^^^^^^^:^^^^^^~~~!!!77?PPY7~^....
    ?77!~~~!?5PPJ77!!!!!!~!!!!~~~~~~~^^^~~~!7?J?777!!~^^:::^!!7?J!~^^::::::^^~~~~~~~~~!!!!7??7YY?~~^....
    """

    VERSION = datasets.Version("1.0.0")

    DEFAULT_CONFIG_NAME = "default"
    BUILDER_CONFIGS = [
        datasets.BuilderConfig(name="default", version=VERSION, description=""),
    ]

    def _info(self):
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    "id": datasets.Value("string"),
                    "compound_id": datasets.Value("string"),
                    "text": datasets.Value("string"),
                }
            ),
        )
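
    # Note (added for illustration): _generate_examples below yields each parsed
    # JSON object unchanged, so every line in the shards is assumed to carry
    # exactly the three string fields declared above: "id", "compound_id", "text".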

    def _split_generators(self, dl_manager):
        downloaded_files = dl_manager.download(_URLS)
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN, gen_kwargs={"filepaths": downloaded_files}
            )
        ]
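
    # All 23 shards are mapped to a single TRAIN split; the script defines no
    # validation or test partition.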

    def _generate_examples(self, filepaths):
        """This function returns the examples in the raw (text) form."""
        logger.info("generating examples from = %s", filepaths)
        key = 0
        for path in filepaths:
            with open(path, encoding="utf-8") as f:
                for instruction_str in f:
                    instruction = json.loads(instruction_str)
                    yield key, instruction
                    key += 1
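
Given the 113GB total size, streaming is usually the practical way to consume the
corpus. A minimal usage sketch, assuming the script is published on the Hugging Face
Hub under a repository id such as lang-uk/malyuk (the id is an assumption, not
confirmed by this script) and a datasets version that still supports script-based
builders:

import itertools

from datasets import load_dataset

# "lang-uk/malyuk" is a hypothetical repository id; replace it with the actual
# Hub id or a local path to this loading script. trust_remote_code is needed on
# datasets versions that still allow script-based builders.
ds = load_dataset(
    "lang-uk/malyuk", split="train", streaming=True, trust_remote_code=True
)

# Stream a few records instead of materializing all 113GB locally.
for example in itertools.islice(ds, 3):
    print(example["id"], example["compound_id"], example["text"][:80])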