import json

from datasets import Dataset, DatasetDict

from util import PARTITIONING_CATS
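
# `PARTITIONING_CATS` is defined in the local `util` module. From its use
# below, it is assumed to be a list of mutually exclusive top-level category
# tags, one of which is "case-description". Hypothetical, illustrative shape:
#
#   PARTITIONING_CATS = ["case-description", ...]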


def construct_hf_dataset(metadata_file: str = "processed_sources.jsonl") -> None:
    """Construct a HF DatasetDict from the HICRIC processed data dir and push it to the Hub."""

    def data_generator(cat: str):
        def validate_tags(tags, partitioning_cats=PARTITIONING_CATS):
            # A file's tags must include exactly one partitioning category.
            matches = [tag for tag in tags if tag in partitioning_cats]
            if len(matches) != 1:
                raise ValueError(
                    f"The list of tags must contain exactly one key from the partitioning categories: {partitioning_cats}."
                )
            return True

        with open(metadata_file, "r") as metadata_f:
            for line in metadata_f:
                obj = json.loads(line)
                local_processed_path = obj["local_processed_path"]
                file_tags = obj["tags"]
                date_accessed = obj["date_accessed"]
                url = obj["url"]
                raw_md5 = obj["md5"]

                # Raises if the file lacks exactly one partitioning category.
                validate_tags(file_tags)
                if cat not in file_tags:
                    continue

                with open(local_processed_path, "r") as data_file:
                    for data_line in data_file:
                        # strict=False tolerates control characters embedded
                        # in the JSON strings.
                        data_obj = json.loads(data_line, strict=False)

                        text = data_obj.get("text", "")
                        line_tags = data_obj.get("tags", [])
                        # Skip empty records; merge any line-level tags into
                        # the file-level tags.
                        if len(text) == 0:
                            continue
                        tags = file_tags + line_tags if line_tags else file_tags
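
                        # Each processed data line is assumed to carry at least
                        # a "text" field and optionally line-level "tags"; for
                        # "case-description" sources it may also carry the
                        # "decision" and "appeal_type" fields read below.
                        # Hypothetical, illustrative record:
                        #
                        #   {"text": "...", "tags": ["..."],
                        #    "decision": "...", "appeal_type": "..."}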

                        rec = {
                            "text": text,
                            "tags": tags,
                            "date_accessed": date_accessed,
                            "source_url": url,
                            "source_md5": raw_md5,
                            "relative_path": local_processed_path,
                        }

                        # Case descriptions carry extra outcome metadata.
                        if cat == "case-description":
                            rec["decision"] = data_obj.get("decision", "unknown")
                            rec["appeal_type"] = data_obj.get("appeal_type", "unknown")

                        yield rec

    # Build one sub-dataset per partitioning category.
    dataset_dict = DatasetDict()
    for cat in PARTITIONING_CATS:
        sub_dataset = Dataset.from_generator(
            generator=data_generator, gen_kwargs={"cat": cat}
        )
        dataset_dict[cat] = sub_dataset
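
    # Note: `Dataset.from_generator` is assumed here to run the generator
    # eagerly and cache the materialized dataset keyed on the generator and
    # its gen_kwargs, so re-runs with unchanged inputs reuse the cache.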

    for k, v in dataset_dict.items():
        v.push_to_hub("persius/hicric", k, private=True)
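
    # Each category is pushed as its own config of persius/hicric; assuming
    # the pushes succeed, a single partition could then be loaded with, e.g.:
    #
    #   from datasets import load_dataset
    #   ds = load_dataset("persius/hicric", "case-description")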

    return None


if __name__ == "__main__":
    construct_hf_dataset()