|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""Wikipedia dataset containing cleaned articles of all languages.""" |
|
|
|
|
|
import json |
|
from tqdm import tqdm |
|
import datasets |
|
from pathlib import Path |
|
|
|
logger = datasets.logging.get_logger(__name__) |
|
|
|
|
|
_CITATION = """""" |
|
|
|
_DESCRIPTION = """""" |
|
|
|
_LICENSE = """""" |
|
|
|
|
|
_VERSION = datasets.Version("2.0.0", "") |
|
_NUM_SPLITS = 68 |
|
|
|
|
|
class WikipediaConfig(datasets.BuilderConfig):
    """BuilderConfig for Wikipedia.

    Selects which pre-split shard of the dump this builder will read; the
    config name is derived from the shard number as ``shard_<shard>``.
    """

    def __init__(self, shard=None, version=_VERSION, **kwargs):
        """BuilderConfig for Wikipedia.

        Args:
            shard: shard number of the dump to load (used verbatim in the
                config name and shard filename).
            version: datasets.Version of this configuration.
            **kwargs: keyword arguments forwarded to super.
        """
        # Fixed docstring: it previously documented a nonexistent `split`
        # parameter; the actual parameter is `shard`.
        super().__init__(
            name=f"shard_{shard}",
            description=f"Wikipedia dataset for split {shard}",
            version=version,
            **kwargs,
        )
        self.shard = shard
|
|
|
|
|
class Wikipedia(datasets.GeneratorBasedBuilder):
    """Wikipedia dataset of cleaned articles, read from pre-split NDJSON shards."""

    BUILDER_CONFIG_CLASS = WikipediaConfig
    # Fixed: the attribute the `datasets` library reads is BUILDER_CONFIGS
    # (plural); the original misspelling `BUILDER_CONFIG` was silently
    # ignored, so the predefined per-shard configs were never registered.
    BUILDER_CONFIGS = [
        WikipediaConfig(shard=str(shard_id)) for shard_id in range(_NUM_SPLITS)
    ]

    def _info(self):
        """Return the DatasetInfo describing the feature schema of one article."""
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    "identifier": datasets.Value("string"),
                    "name": datasets.Value("string"),
                    "namespace_name": datasets.Value("string"),
                    "namespace_identifier": datasets.Value("string"),
                    "categories": [
                        {
                            "name": datasets.Value("string"),
                            "url": datasets.Value("string"),
                        }
                    ],
                    "date_modified": datasets.Value("string"),
                    "url": datasets.Value("string"),
                    "html": datasets.Value("string"),
                    "wikitext": datasets.Value("string"),
                    "in_language": datasets.Value("string"),
                    "main_entity": {
                        "identifier": datasets.Value("string"),
                        "url": datasets.Value("string"),
                    },
                    "is_part_of": {
                        "name": datasets.Value("string"),
                        "identifier": datasets.Value("string"),
                    },
                    "license": [
                        {
                            "name": datasets.Value("string"),
                            "url": datasets.Value("string"),
                            "identifier": datasets.Value("string"),
                        }
                    ],
                }
            ),
            supervised_keys=None,
            homepage="https://dumps.wikimedia.org",
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Build the single TRAIN split for the shard selected by the config.

        Args:
            dl_manager: download manager (unused — data is read directly from
                ``self.config.data_dir``).
        """
        # NOTE(review): the filename prefix is hard-coded to `enwiki_`;
        # presumably only English shards are laid out this way — confirm
        # before reusing this script for other languages.
        data_paths = [
            Path(self.config.data_dir) / f"enwiki_{self.config.shard}.ndjson"
        ]
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN, gen_kwargs={"filepaths": data_paths}
            )
        ]

    def _generate_examples(self, filepaths):
        """Yield ``(identifier, example)`` pairs from NDJSON shard files.

        Args:
            filepaths: iterable of paths to newline-delimited JSON files,
                one article object per line.

        Yields:
            Tuple of (article identifier, dict matching the schema in _info).
        """
        for filepath in filepaths:
            # Explicit encoding: the dump is UTF-8; relying on the platform
            # default (e.g. cp1252 on Windows) would corrupt article text.
            with open(filepath, "r", encoding="utf-8") as f:
                for line in tqdm(f):
                    example = json.loads(line)
                    clean_example = {
                        "name": example["name"],
                        "identifier": example["identifier"],
                        "date_modified": example["date_modified"],
                        "namespace_name": example["namespace"]["name"],
                        "namespace_identifier": example["namespace"]["identifier"],
                        # Optional keys default to None so the schema still
                        # validates when they are absent from a record.
                        "categories": example.get("categories", None),
                        "url": example["url"],
                        # Dropped a no-op f-string wrapper around this field.
                        "html": example["article_body"]["html"],
                        "wikitext": example["article_body"]["wikitext"],
                        "in_language": example["in_language"],
                        "main_entity": example.get("main_entity", None),
                        "is_part_of": example["is_part_of"],
                        "license": example["license"],
                    }
                    yield clean_example["identifier"], clean_example
|
|