""" |
|
Mirror of https://github.com/Alab-NII/2wikimultihop |
|
""" |

import json

import datasets
import pandas as pd

_DESCRIPTION = """\
Mirror of https://github.com/Alab-NII/2wikimultihop
"""

_CITATION = """
@inproceedings{xanh2020_2wikimultihop,
    title = "Constructing A Multi-hop {QA} Dataset for Comprehensive Evaluation of Reasoning Steps",
    author = "Ho, Xanh and
      Duong Nguyen, Anh-Khoa and
      Sugawara, Saku and
      Aizawa, Akiko",
    booktitle = "Proceedings of the 28th International Conference on Computational Linguistics",
    month = dec,
    year = "2020",
    address = "Barcelona, Spain (Online)",
    publisher = "International Committee on Computational Linguistics",
    url = "https://www.aclweb.org/anthology/2020.coling-main.580",
    pages = "6609--6625",
}
"""
DEV_URL = "https://huggingface.co/datasets/somebody-had-to-do-it/2WikiMultihopQA/resolve/main/dev.parquet?download=true"
TRAIN_URL = "https://huggingface.co/datasets/somebody-had-to-do-it/2WikiMultihopQA/resolve/main/train.parquet?download=true"
TEST_URL = "https://huggingface.co/datasets/somebody-had-to-do-it/2WikiMultihopQA/resolve/main/test.parquet?download=true"


class Dataset2WikiMultihopQa(datasets.GeneratorBasedBuilder):
    """Mirror of https://github.com/Alab-NII/2wikimultihop"""

    VERSION = datasets.Version("1.0.0")

    def _info(self):
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    "_id": datasets.Value("string"),
                    "type": datasets.Value("string"),
                    "question": datasets.Value("string"),
                    # Paragraphs: each has a title plus a list of sentences.
                    "context": datasets.features.Sequence(
                        {
                            "title": datasets.Value("string"),
                            "content": datasets.features.Sequence(
                                datasets.Value("string")
                            ),
                        }
                    ),
                    # Pointers into `context`: paragraph title plus the index
                    # of the supporting sentence.
                    "supporting_facts": datasets.features.Sequence(
                        {
                            "title": datasets.Value("string"),
                            "sent_id": datasets.Value("int32"),
                        }
                    ),
                    # (fact, relation, entity) triples for the reasoning chain.
                    "evidences": datasets.features.Sequence(
                        {
                            "fact": datasets.Value("string"),
                            "relation": datasets.Value("string"),
                            "entity": datasets.Value("string"),
                        }
                    ),
                    "answer": datasets.Value("string"),
                }
            ),
            supervised_keys=None,
            homepage="https://github.com/Alab-NII/2wikimultihop",
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Returns SplitGenerators."""
        # download() fetches each file into the local cache and returns its
        # local path.
        train_uri = dl_manager.download(TRAIN_URL)
        dev_uri = dl_manager.download(DEV_URL)
        test_uri = dl_manager.download(TEST_URL)

        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN, gen_kwargs={"filepath": train_uri}
            ),
            # The dataset ships a "dev" split rather than the standard
            # "validation" name.
            datasets.SplitGenerator(
                name=datasets.Split("dev"), gen_kwargs={"filepath": dev_uri}
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST, gen_kwargs={"filepath": test_uri}
            ),
        ]

    def _generate_examples(self, filepath):
        """Yields examples from a Parquet file."""
        df = pd.read_parquet(filepath)

        # The nested columns are stored as JSON strings in the parquet files;
        # decode them into the structures declared in _info().
        for idx, row in df.iterrows():
            yield idx, {
                "_id": row["_id"],
                "type": row["type"],
                "question": row["question"],
                # [title, [sentence, ...]] pairs.
                "context": [
                    {"title": item[0], "content": item[1]}
                    for item in json.loads(row["context"])
                ],
                # [title, sentence_index] pairs.
                "supporting_facts": [
                    {"title": fact[0], "sent_id": fact[1]}
                    for fact in json.loads(row["supporting_facts"])
                ],
                # [fact, relation, entity] triples.
                "evidences": [
                    {
                        "fact": evidence[0],
                        "relation": evidence[1],
                        "entity": evidence[2],
                    }
                    for evidence in json.loads(row["evidences"])
                ],
                "answer": row["answer"],
            }
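
# Raw layout sketch (inferred from the decoding above, not independently
# verified): in the parquet files the nested columns are JSON strings, e.g.
#   context:          '[["Some Title", ["Sentence 0.", "Sentence 1."]], ...]'
#   supporting_facts: '[["Some Title", 0], ...]'
#   evidences:        '[["fact", "relation", "entity"], ...]'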