""" |
|
Prepares PES2O for release on the HugginFace hub. |
|
|
|
Author: Luca Soldaini (@soldni) |
|
""" |
import argparse
import json
from contextlib import ExitStack
from functools import partial
from multiprocessing import Manager, Pool, cpu_count, set_start_method
from queue import Queue
from threading import Thread
from time import sleep
from typing import Optional, Tuple

from smashed.utils import (
    MultiPath,
    compress_stream,
    decompress_stream,
    open_file_for_write,
    recursively_list_files,
    stream_file_for_read,
)
from tqdm import tqdm


def process_single(
    io_paths: Tuple[MultiPath, MultiPath],
    version: str,
    pbar_queue: Optional[Queue] = None,
):
    """Reshape one compressed JSONL shard and write it to its destination."""
    src, dst = io_paths
    docs_cnt = 0

    with ExitStack() as stack:
        in_file = stack.enter_context(stream_file_for_read(src, "rb"))
        in_stream = stack.enter_context(decompress_stream(in_file, "rt"))
        out_file = stack.enter_context(open_file_for_write(dst, "wb"))
        out_stream = stack.enter_context(compress_stream(out_file, "wt"))

        for line in in_stream:
            data = json.loads(line)
            # drop the bulky metadata field; derive source/split from the path
            data.pop("metadata", None)
            data["source"] = "s2ag" if "dataset=s2ag" in src.as_str else "s2orc"
            data["split"] = "train" if "split=train" in src.as_str else "valid"
            data["version"] = version

            out_stream.write(json.dumps(data) + "\n")
            docs_cnt += 1

            # flush progress in batches of 1,000 docs to limit queue traffic
            if pbar_queue is not None and docs_cnt % 1000 == 0:
                pbar_queue.put((0, docs_cnt))
                docs_cnt = 0

        if pbar_queue is not None:
            # report the finished file along with any residual doc count
            pbar_queue.put((1, docs_cnt))
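
# For reference, the reshaping above on a hypothetical input record (only
# "metadata", "source", "split", and "version" are touched; all other fields
# pass through unchanged):
#   in:  {"id": "1", "text": "...", "metadata": {...}}
#   out: {"id": "1", "text": "...", "source": "s2orc", "split": "train",
#         "version": "<opts.version>"}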


def threaded_progressbar(q: Queue, timeout: float, total_files: Optional[int] = None):
    """Render file- and doc-level progress bars from (files, docs) queue items."""
    with ExitStack() as stack:
        files_pbar = stack.enter_context(
            tqdm(desc=" Files", unit="files", position=0, total=total_files)
        )
        docs_pbar = stack.enter_context(
            tqdm(desc=" Docs", unit=" docs", position=1, unit_scale=True)
        )

        while True:
            item = q.get()
            if item is None:
                # sentinel from the main process: all workers are done
                break
            files, docs = item
            files_pbar.update(files)
            docs_pbar.update(docs)
            sleep(timeout)


def main():
    ap = argparse.ArgumentParser()
    ap.add_argument("src", type=str, help="Source path")
    ap.add_argument("dst", type=str, help="Destination path")
    ap.add_argument(
        "--debug", default=False, help="Debug mode", action="store_true"
    )
    ap.add_argument(
        "--parallel", type=int, default=cpu_count(), help="Number of parallel processes"
    )
    ap.add_argument(
        "-v", "--version", type=str, required=True, help="Version of the dataset"
    )
    opts = ap.parse_args()

    src = MultiPath.parse(opts.src)
    dst = MultiPath.parse(opts.dst)

    # mirror the source directory structure under the destination prefix
    src_paths = [MultiPath.parse(p) for p in recursively_list_files(src)]
    dst_paths = [
        dst / diff if len(diff := (single_src - src)) > 0 else dst
        for single_src in src_paths
    ]

    if opts.debug:
        # process sequentially so failures surface with full tracebacks
        with tqdm(total=len(src_paths)) as pbar:
            for single_src, single_dst in zip(src_paths, dst_paths):
                process_single((single_src, single_dst), version=opts.version)
                pbar.update(1)
    else:
        set_start_method("spawn")

        with Pool(processes=opts.parallel) as pool:
            pbar_queue: Queue = (manager := Manager()).Queue()
            pbar_thread = Thread(
                target=threaded_progressbar,
                args=(pbar_queue, 0.1, len(src_paths)),
                daemon=True,
            )
            pbar_thread.start()

            # drain the iterator; workers report progress via pbar_queue
            for _ in pool.imap_unordered(
                partial(process_single, pbar_queue=pbar_queue, version=opts.version),
                tuple(zip(src_paths, dst_paths)),
            ):
                ...

            pool.close()
            pool.join()

            # stop the progress-bar thread and tear down the manager
            pbar_queue.put(None)
            pbar_thread.join()
            manager.shutdown()
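
# Example invocation (script name and paths are hypothetical; MultiPath
# accepts local paths as well as remote URLs supported by smashed):
#   python prepare_pes2o.py s3://my-bucket/pes2o/raw s3://my-bucket/pes2o/release -v v2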


if __name__ == "__main__":
    main()