# eu_wikipedias/eu_wikipedias.py
# A Hugging Face dataset script that loads Wikipedias for the EU languages by
# deferring to olm/wikipedia:
# https://huggingface.co/datasets/olm/wikipedia/blob/main/wikipedia.py
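#
# For reference, the per-language call this builder defers to looks roughly
# like this (a sketch; olm/wikipedia parses the raw dump at load time, so
# expect it to be slow and resource-hungry):
#
#   datasets.load_dataset("olm/wikipedia", language="bg", date="20221101", split="train")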
import datasets

logger = datasets.logging.get_logger(__name__)

_CITATION = """\
@ONLINE {wikidump,
author = {Wikimedia Foundation},
title = {Wikimedia Downloads},
url = {https://dumps.wikimedia.org}
}
"""

_DESCRIPTION = """\
Wikipedia dataset containing cleaned articles for the EU languages.
The dataset is built from the Wikipedia dumps
(https://dumps.wikimedia.org/) via olm/wikipedia, with all languages combined
into a single train split. Each example contains the content of one full
Wikipedia article, cleaned to strip markup and unwanted sections
(references, etc.).
"""
_LICENSE = (
"This work is licensed under the Creative Commons Attribution-ShareAlike "
"3.0 Unported License. To view a copy of this license, visit "
"http://creativecommons.org/licenses/by-sa/3.0/ or send a letter to "
"Creative Commons, PO Box 1866, Mountain View, CA 94042, USA."
)
_VERSION = datasets.Version("1.0.0", "")

# EU official languages, by ISO 639-1 code. Only Bulgarian and Czech are
# enabled in this initial commit; uncomment the rest to include them.
eu_languages = [
"bg",
"cs",
# "da",
# "de",
# "el",
# "en",
# "es",
# "et",
# "fi",
# "fr",
# "ga",
# "hr",
# "hu",
# "it",
# "lt",
# "lv",
# "mt",
# "nl",
# "pl",
# "pt",
# "ro",
# "sk",
# "sl",
# "sv",
]


class WikipediaConfig(datasets.BuilderConfig):
"""BuilderConfig for EuWikipedia."""
def __init__(self, date=None, version=_VERSION, **kwargs):
"""BuilderConfig for Wikipedia.
Args:
date: string, date of the Wikipedia dump in YYYYMMDD format. A list of
available dates can be found at https://dumps.wikimedia.org/enwiki/.
**kwargs: keyword arguments forwarded to super.
"""
super().__init__(
name=f"{date}",
description=f"Wikipedia dataset for EU languages, parsed from {date} dump.",
version=version,
**kwargs,
)
self.date = date
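
# For example, WikipediaConfig(date="20221101") produces a config named
# "20221101"; the config name is how a particular dump date is selected.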
_DATE = "20221101"


class EuWikipedia(datasets.GeneratorBasedBuilder):
"""Wikipedia dataset."""
    BUILDER_CONFIG_CLASS = WikipediaConfig
    BUILDER_CONFIGS = [WikipediaConfig(date=_DATE)]

def _info(self):
return datasets.DatasetInfo(
description=_DESCRIPTION,
features=datasets.Features(
{
"id": datasets.Value("string"),
"url": datasets.Value("string"),
"title": datasets.Value("string"),
"text": datasets.Value("string"),
}
),
# No default supervised_keys.
supervised_keys=None,
homepage="https://dumps.wikimedia.org",
citation=_CITATION,
)

    def _split_generators(self, dl_manager):
return [
datasets.SplitGenerator(
name=datasets.Split.TRAIN, gen_kwargs={"date": self.config.date}
)
]

    def _generate_examples(self, date):
        # Defer downloading and parsing of the dumps to olm/wikipedia.
        for lang in eu_languages:
            # Request the "train" split explicitly: without it, load_dataset
            # returns a DatasetDict, and iterating that yields split names.
            wiki = datasets.load_dataset("olm/wikipedia", language=lang, date=date, split="train")
            for example in wiki:
                # Key on language + article id so keys are unique across languages.
                yield f"{lang}-{example['id']}", example
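

# A minimal usage sketch (the Hub repo id "dlwh/eu_wikipedias" is an
# assumption based on this file's location; adjust it to wherever the
# script actually lives):
#
#   from datasets import load_dataset
#
#   # Select a dump by its config name (the date string):
#   ds = load_dataset("dlwh/eu_wikipedias", "20221101", split="train")
#   print(ds[0]["title"], ds[0]["url"])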