"""
Prepares PES2O for release on the HugginFace hub.
Author: Luca Soldaini (@soldni)
"""
import argparse
import json
import re
from contextlib import ExitStack
from functools import partial
from multiprocessing import Manager, Pool, cpu_count, set_start_method
from pathlib import Path
from queue import Queue
from threading import Thread
from time import sleep
from typing import List, Optional, Tuple

import smart_open
from smashed.utils import MultiPath, recursively_list_files
from tqdm import tqdm
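
# `smashed.utils.MultiPath` abstracts over local and remote (e.g. S3) paths,
# and `smart_open` streams and de/compresses files from either, so the same
# code path below handles local and remote locations.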


def process_single(
    io_paths: Tuple[List[MultiPath], MultiPath],
    version: str,
    pbar_queue: Optional[Queue] = None,
):
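    """Merge a group of JSONL shards into a single destination file, dropping
    per-document metadata and tagging each record with its source partition
    (s2ag/s2orc, train/valid) and the release version."""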
    all_src, dst = io_paths
    docs_cnt = 0

    if dst.is_local:
        Path(dst.as_str).parent.mkdir(parents=True, exist_ok=True)
    with smart_open.open(dst.as_str, "wt") as out_stream:
        for src in all_src:
            with smart_open.open(src.as_str, "rt") as in_stream:
                for line in in_stream:
                    data = json.loads(line)
                    # drop the metadata blob; tag provenance and release version
                    data.pop("metadata", None)
                    data["source"] = (
                        ("s2ag" if "dataset=s2ag" in src.as_str else "s2orc")
                        + "/"
                        + ("train" if "split=train" in src.as_str else "valid")
                    )
                    data["version"] = version
                    out_stream.write(json.dumps(data) + "\n")
                    docs_cnt += 1

                    if pbar_queue is not None and docs_cnt % 10000 == 0:
                        # report document progress in batches of 10k
                        pbar_queue.put((0, docs_cnt))
                        docs_cnt = 0

            if pbar_queue is not None:
                # one source file fully processed
                pbar_queue.put((1, 0))

    if pbar_queue is not None:
        # flush any remaining document count
        pbar_queue.put((0, docs_cnt))
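
# The records written above keep every input field except "metadata"; roughly:
#   {"id": "...", "text": "...", "source": "s2orc/train", "version": "v2"}
# ("id" and "text" are illustrative fields, not guaranteed by this script.)
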
def threaded_progressbar(q: Queue, timeout: float, total_files: Optional[int] = None):
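    """Render stacked file/document progress bars from (files, docs)
    increments read off `q`, stopping when a None sentinel arrives."""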
    with ExitStack() as stack:
        files_pbar = stack.enter_context(
            tqdm(desc=" Files", unit="files", position=0, total=total_files)
        )
        docs_pbar = stack.enter_context(
            tqdm(desc=" Docs", unit=" docs", position=1, unit_scale=True)
        )
        while True:
            item = q.get()
            if item is None:
                break
            files, docs = item
            files_pbar.update(files)
            docs_pbar.update(docs)
            sleep(timeout)

def main():
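    """Parse arguments, group source shards by directory, and process each
    group either serially (--debug) or across a pool of worker processes."""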
    ap = argparse.ArgumentParser()
    ap.add_argument("src", type=str, help="Source path")
    ap.add_argument("dst", type=str, help="Destination path")
    ap.add_argument("--debug", default=False, help="Debug mode", action="store_true")
    ap.add_argument(
        "--parallel", type=int, default=cpu_count(), help="Number of parallel processes"
    )
    ap.add_argument(
        "-v", "--version", type=str, required=True, help="Version of the dataset"
    )
    opts = ap.parse_args()

    src = MultiPath.parse(opts.src)
    dst = MultiPath.parse(opts.dst)
    # group source files by parent directory: each directory's shards are
    # merged into a single output file
    grouped_src_paths = {}
    for single_src in recursively_list_files(src):
        single_src_dir, _ = single_src.rsplit("/", 1)
        grouped_src_paths.setdefault(single_src_dir, []).append(single_src)
    src_paths: List[List[MultiPath]] = []
    dst_paths: List[MultiPath] = []
    for dir_name, dir_values in grouped_src_paths.items():
        src_paths.append([MultiPath.parse(p) for p in dir_values])
        # flatten hive-style partitions into a single file name, e.g.
        # ".../dataset=s2orc/split=train/part_id=0" -> ".../s2orc.train.0.gz"
        dir_path = MultiPath.parse(dir_name.replace("part_id=", "") + ".gz")
        dst_path = dst / diff if len(diff := (dir_path - src)) > 0 else dst
        dst_path.path = re.sub(r"dataset=(\w+)/", r"\1.", dst_path.path)
        dst_path.path = re.sub(r"split=(\w+)/", r"\1.", dst_path.path)
        dst_paths.append(dst_path)
    if opts.debug:
        # debug mode: process groups sequentially in the main process
        with tqdm(total=len(src_paths)) as pbar:
            for single_src, single_dst in zip(src_paths, dst_paths):
                process_single((single_src, single_dst), version=opts.version)
                pbar.update(1)
    else:
        set_start_method("spawn")
        with Pool(processes=opts.parallel) as pool:
            pbar_queue: Queue = (manager := Manager()).Queue()
            pbar_thread = Thread(
                target=threaded_progressbar,
                args=(pbar_queue, 0.1, sum(len(p) for p in src_paths)),
                daemon=True,
            )
            pbar_thread.start()

            # drain the lazy iterator to drive the worker processes
            for _ in pool.imap_unordered(
                partial(process_single, pbar_queue=pbar_queue, version=opts.version),
                tuple(zip(src_paths, dst_paths)),
            ):
                ...

            pool.close()
            pool.join()

            # stop the progress bar thread with a sentinel, then clean up
            pbar_queue.put(None)
            pbar_thread.join()
            manager.shutdown()

if __name__ == "__main__":
    main()