"""OPUS liv4ever dataset."""

from pathlib import Path

import datasets


_DESCRIPTION = """\
This is the Livonian 4-lingual parallel corpus. Livonian is a Uralic / Finnic language with just about 20 fluent
speakers and no native speakers (as of 2021). The texts and translations in this corpus were collected from all the
digital text resources that could be found by the authors; scanned and printed materials are left for future work.
"""

_HOMEPAGE = "https://opus.nlpl.eu/liv4ever.php"

_LICENSE = "CC BY-SA"

_CITATION = r"""
@inproceedings{rikters-etal-2022-machine,
    title = "Machine Translation for {L}ivonian: Catering to 20 Speakers",
    author = "Rikters, Mat{\=\i}ss and
      Tomingas, Marili and
      Tuisk, Tuuli and
      Ern{\v{s}}treits, Valts and
      Fishel, Mark",
    booktitle = "Proceedings of the 60th Annual Meeting of the Association for Computational Linguistics (Volume 2: Short Papers)",
    month = may,
    year = "2022",
    address = "Dublin, Ireland",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2022.acl-short.55",
    doi = "10.18653/v1/2022.acl-short.55",
    pages = "508--514",
}
@inproceedings{tiedemann-2012-parallel,
    title = "Parallel Data, Tools and Interfaces in {OPUS}",
    author = {Tiedemann, J{\"o}rg},
    booktitle = "Proceedings of the Eighth International Conference on Language Resources and Evaluation ({LREC}'12)",
    month = may,
    year = "2012",
    address = "Istanbul, Turkey",
    publisher = "European Language Resources Association (ELRA)",
    url = "http://www.lrec-conf.org/proceedings/lrec2012/pdf/463_Paper.pdf",
    pages = "2214--2218",
}
"""

_URLS = {
    "parallel": "https://opus.nlpl.eu/download.php?f=liv4ever/v1/moses/{}-{}.txt.zip",
    "monolingual": "https://opus.nlpl.eu/download.php?f=liv4ever/v1/mono/{}.txt.gz",
}

_LANGUAGES = ["en", "et", "fr", "liv", "lv"]
_LANGUAGE_PAIRS = [("en", "liv"), ("et", "liv"), ("fr", "liv"), ("liv", "lv")]


class Liv4EverConfig(datasets.BuilderConfig):
    """BuilderConfig for liv4ever: pass either a parallel `language_pair` or a single `language`."""

    def __init__(self, language_pair=None, language=None, version=datasets.Version("1.0.0"), **kwargs):
        if (language_pair and language) or (not language_pair and not language):
            raise ValueError("Pass either 'language_pair' or 'language'")
        if language_pair:
            if isinstance(language_pair, str):
                language_pair = language_pair.split("-")
            # Normalize to a sorted tuple so e.g. "liv-en" and "en-liv" resolve to the same config name.
            language_pair = tuple(sorted(language_pair))
            name = "-".join(language_pair)
        else:
            name = language
        super().__init__(name=name, version=version, **kwargs)
        self.language_pair = language_pair
        self.language = language


class Liv4Ever(datasets.GeneratorBasedBuilder):
    """OPUS liv4ever builder with parallel (language-pair) and monolingual (single-language) configs."""

    BUILDER_CONFIG_CLASS = Liv4EverConfig

    BUILDER_CONFIGS = [Liv4EverConfig(language_pair=language_pair) for language_pair in _LANGUAGE_PAIRS] + [
        Liv4EverConfig(language=language) for language in _LANGUAGES
    ]

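    # Usage sketch (assumptions: this file is used as a local loading script saved as e.g.
    # "liv4ever.py", with a `datasets` version that still supports script-based loading):
    #
    #     import datasets
    #     parallel = datasets.load_dataset("./liv4ever.py", "en-liv", split="train")
    #     monolingual = datasets.load_dataset("./liv4ever.py", "liv", split="train")
    #
    # Config names are the sorted, hyphen-joined pair ("en-liv", "et-liv", "fr-liv", "liv-lv")
    # or a single language code ("en", "et", "fr", "liv", "lv").
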
    def _info(self):
        if self.config.language_pair:
            features = datasets.Features(
                {
                    "translation": datasets.Translation(languages=self.config.language_pair),
                }
            )
        else:
            features = datasets.Features(
                {
                    "text": datasets.Value("string"),
                }
            )
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=features,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        url = (
            _URLS["parallel"].format(*self.config.language_pair)
            if self.config.language_pair
            else _URLS["monolingual"].format(self.config.language)
        )
        # For the parallel zip this returns the extraction directory; for the monolingual
        # .txt.gz it returns the path of the decompressed text file.
        path = dl_manager.download_and_extract(url)
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={
                    "path": path,
                },
            ),
        ]

    def _generate_examples(self, path):
        if self.config.language_pair:
            for key, item in enumerate(_parse_txt_pair(path, self.config.language_pair)):
                yield key, {"translation": item}
        else:
            for key, line in enumerate(_parse_txt(path)):
                yield key, {
                    "text": line,
                }


def _parse_txt(path):
    with open(path, encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            if line:
                yield line


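# Layout note (an assumption based on the usual OPUS "moses" packaging): the pair zip extracts to
# plain-text files named like "liv4ever.<l1>-<l2>.<l1>" and "liv4ever.<l1>-<l2>.<l2>", so globbing
# on the language extension below selects one file per language.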
def _parse_txt_pair(path, language_pair):
    paths = [sorted(Path(path).glob(f"*.{language}"))[0] for language in language_pair]
    # Zipping the two generators keeps sentences aligned line by line; this assumes neither file
    # contains blank lines that _parse_txt would drop on one side only.
    for line_pair in zip(_parse_txt(paths[0]), _parse_txt(paths[1])):
        yield {language: line for language, line in zip(language_pair, line_pair)}