xlel_wd_dictionary.py
# Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A dictionary of events in Wikidata"""
import json

import datasets

_CITATION = """\
@article{pratapa-etal-2022-multilingual,
    title = {Multilingual Event Linking to Wikidata},
    author = {Pratapa, Adithya and Gupta, Rishubh and Mitamura, Teruko},
    publisher = {arXiv},
    year = {2022},
    url = {https://arxiv.org/abs/2204.06535},
}
"""

_DESCRIPTION = """\
XLEL-WD is a multilingual event linking dataset. \
This sub-dataset contains a dictionary of events from Wikidata. \
The multilingual descriptions for Wikidata event items are taken from the corresponding Wikipedia articles.
"""
_HOMEPAGE = "https://github.com/adithya7/xlel-wd"
_LICENSE = "CC-BY-4.0"
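
# relative path to the event dictionary file (one JSON object per line) shipped
# with this dataset repository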
_URLS = {
    "wikidata": "label_dict.jsonl",
}
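
# Wikipedia language editions that provide event descriptions (44 languages)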
_WIKIDATA_LANGS = [
"af",
"ar",
"be",
"bg",
"bn",
"ca",
"cs",
"da",
"de",
"el",
"en",
"es",
"fa",
"fi",
"fr",
"he",
"hi",
"hu",
"id",
"it",
"ja",
"ko",
"ml",
"mr",
"ms",
"nl",
"no",
"pl",
"pt",
"ro",
"ru",
"si",
"sk",
"sl",
"sr",
"sv",
"sw",
"ta",
"te",
"th",
"tr",
"uk",
"vi",
"zh",
]
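
# one combined config ("wikidata") plus one per-language config ("wikidata.<lang>")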
_DATASET_NAMES = ["wikidata"]
_DATASET_DESCRIPTIONS = [
    "Events from Wikidata, with text descriptions from corresponding multilingual Wikipedia."
]

for lang in _WIKIDATA_LANGS:
    _DATASET_NAMES += [f"wikidata.{lang}"]
    _DATASET_DESCRIPTIONS += [
        f"Events from Wikidata, with text descriptions from corresponding {lang}-wikipedia."
    ]


class XlelWdDictionary(datasets.GeneratorBasedBuilder):
"""A dictionary of events from Wikidata, with text descriptions from multilingual Wikipedia."""
BUILDER_CONFIGS = [
datasets.BuilderConfig(
name=name, version=datasets.Version("1.0.0"), description=desc
)
for name, desc in zip(_DATASET_NAMES, _DATASET_DESCRIPTIONS)
]
# default config includes all available languages
DEFAULT_CONFIG_NAME = "wikidata"
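    # each record pairs one Wikidata event with its title and description in a
    # single Wikipedia language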
    def _info(self):
        features = {
            "label_id": datasets.Value("string"),
            "label_title": datasets.Value("string"),
            "label_desc": datasets.Value("string"),
            "label_lang": datasets.Value("string"),
        }
        return datasets.DatasetInfo(
            description=_DESCRIPTION + self.config.description,
            features=datasets.Features(features),
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
        )
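
    # every config downloads the same label_dict.jsonl; for "wikidata.<lang>"
    # configs, the language filtering happens in _generate_examples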
    def _split_generators(self, dl_manager):
        urls = _URLS[self.config.name.split(".")[0]]
        data_file_path = dl_manager.download_and_extract(urls)
        return [
            datasets.SplitGenerator(
                name="dictionary",
                gen_kwargs={"filepath": data_file_path},
            )
        ]

    def _generate_examples(self, filepath):
        _, *langs = self.config.name.split(".")
        with open(filepath, encoding="utf-8") as f:
            for key, row in enumerate(f):
                data = json.loads(row)
                # yield Wikidata events with descriptions in the requested language;
                # if no language is specified in the config, yield all of them
                if len(langs) == 0 or langs[0] == data["label_lang"]:
                    yield key, data
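

# A minimal usage sketch (runs only when this script is executed directly).
# The repository id "adithya7/xlel_wd_dictionary" and the "wikidata.en" config
# below are illustrative assumptions, not part of the loading script itself;
# newer versions of `datasets` may also require `trust_remote_code=True` to
# execute script-based datasets like this one.
if __name__ == "__main__":
    from datasets import load_dataset

    # full multilingual dictionary (the script exposes a single "dictionary" split)
    full_dict = load_dataset("adithya7/xlel_wd_dictionary", "wikidata", split="dictionary")
    # English-only event descriptions
    en_dict = load_dataset("adithya7/xlel_wd_dictionary", "wikidata.en", split="dictionary")

    print(full_dict)
    print(en_dict[0])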