# coding=utf-8
# Copyright 2020 HuggingFace Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""MasakhaNEWS: News Topic Classification for African languages"""
import datasets
import pandas as pd
logger = datasets.logging.get_logger(__name__)
_CITATION = """\
@article{Adelani2023MasakhaNEWS,
title={MasakhaNEWS: News Topic Classification for African languages},
author={David Ifeoluwa Adelani and Marek Masiak and Israel Abebe Azime and Jesujoba Oluwadara Alabi and Atnafu Lambebo Tonja and Christine Mwase and Odunayo Ogundepo and Bonaventure F. P. Dossou and Akintunde Oladipo and Doreen Nixdorf and Chris Chinenye Emezue and Sana Sabah al-azzawi and Blessing K. Sibanda and Davis David and Lolwethu Ndolela and Jonathan Mukiibi and Tunde Oluwaseyi Ajayi and Tatiana Moteu Ngoli and Brian Odhiambo and Abraham Toluwase Owodunni and Nnaemeka C. Obiefuna and Shamsuddeen Hassan Muhammad and Saheed Salahudeen Abdullahi and Mesay Gemeda Yigezu and Tajuddeen Gwadabe and Idris Abdulmumin and Mahlet Taye Bame and Oluwabusayo Olufunke Awoyomi and Iyanuoluwa Shode and Tolulope Anu Adelani and Habiba Abdulganiy Kailani and Abdul-Hakeem Omotayo and Adetola Adeeko and Afolabi Abeeb and Anuoluwapo Aremu and Olanrewaju Samuel and Clemencia Siro and Wangari Kimotho and Onyekachi Raphael Ogbu and Chinedu E. Mbonu and Chiamaka I. Chukwuneke and Samuel Fanijo and Jessica Ojo and Oyinkansola F. Awosan and Tadesse Kebede Guge and Sakayo Toadoum Sari and Pamela Nyatsine and Freedmore Sidume and Oreen Yousuf and Mardiyyah Oduwole and Ussen Kimanuka and Kanda Patrick Tshinu and Thina Diko and Siyanda Nxakama and Abdulmejid Tuni Johar and Sinodos Gebre and Muhidin Mohamed and Shafie Abdi Mohamed and Fuad Mire Hassan and Moges Ahmed Mehamed and Evrard Ngabire and Pontus Stenetorp},
journal={ArXiv},
year={2023},
volume={}
}
"""
_DESCRIPTION = """\
MasakhaNEWS is the largest publicly available dataset for news topic classification in 16 languages widely spoken in Africa.
The languages are:
- Amharic (amh)
- English (eng)
- French (fra)
- Hausa (hau)
- Igbo (ibo)
- Lingala (lin)
- Luganda (lug)
- Oromo (orm)
- Nigerian Pidgin (pcm)
- Rundi (run)
- chiShona (sna)
- Somali (som)
- Kiswahili (swa)
- Tigrinya (tir)
- isiXhosa (xho)
- Yorùbá (yor)
The train/validation/test sets are available for all 16 languages.
For more details see *** arXiv link **
"""
_URL = "https://github.com/masakhane-io/masakhane-news/raw/main/data/"
_TRAINING_FILE = "train.tsv"
_DEV_FILE = "dev.tsv"
_TEST_FILE = "test.tsv"
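# For each language config, the split files resolve to
# f"{_URL}{config_name}/{file}", e.g.
# https://github.com/masakhane-io/masakhane-news/raw/main/data/hau/train.tsv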

class MasakhanewsConfig(datasets.BuilderConfig):
    """BuilderConfig for Masakhanews"""

    def __init__(self, **kwargs):
        """BuilderConfig for Masakhanews.

        Args:
            **kwargs: keyword arguments forwarded to super.
        """
        super(MasakhanewsConfig, self).__init__(**kwargs)

class Masakhanews(datasets.GeneratorBasedBuilder):
    """Masakhanews dataset."""

    BUILDER_CONFIGS = [
        MasakhanewsConfig(name="amh", version=datasets.Version("1.0.0"), description="Masakhanews Amharic dataset"),
        MasakhanewsConfig(name="eng", version=datasets.Version("1.0.0"), description="Masakhanews English dataset"),
        MasakhanewsConfig(name="fra", version=datasets.Version("1.0.0"), description="Masakhanews French dataset"),
        MasakhanewsConfig(name="hau", version=datasets.Version("1.0.0"), description="Masakhanews Hausa dataset"),
        MasakhanewsConfig(name="ibo", version=datasets.Version("1.0.0"), description="Masakhanews Igbo dataset"),
        MasakhanewsConfig(name="lin", version=datasets.Version("1.0.0"), description="Masakhanews Lingala dataset"),
        MasakhanewsConfig(name="lug", version=datasets.Version("1.0.0"), description="Masakhanews Luganda dataset"),
        MasakhanewsConfig(name="orm", version=datasets.Version("1.0.0"), description="Masakhanews Oromo dataset"),
        MasakhanewsConfig(
            name="pcm", version=datasets.Version("1.0.0"), description="Masakhanews Nigerian-Pidgin dataset"
        ),
        MasakhanewsConfig(name="run", version=datasets.Version("1.0.0"), description="Masakhanews Rundi dataset"),
        MasakhanewsConfig(name="sna", version=datasets.Version("1.0.0"), description="Masakhanews Shona dataset"),
        MasakhanewsConfig(name="som", version=datasets.Version("1.0.0"), description="Masakhanews Somali dataset"),
        MasakhanewsConfig(name="swa", version=datasets.Version("1.0.0"), description="Masakhanews Swahili dataset"),
        MasakhanewsConfig(name="tir", version=datasets.Version("1.0.0"), description="Masakhanews Tigrinya dataset"),
        MasakhanewsConfig(name="xho", version=datasets.Version("1.0.0"), description="Masakhanews Xhosa dataset"),
        MasakhanewsConfig(name="yor", version=datasets.Version("1.0.0"), description="Masakhanews Yoruba dataset"),
    ]

    def _info(self):
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    "label": datasets.features.ClassLabel(
                        names=["business", "entertainment", "health", "politics", "religion", "sports", "technology"]
                    ),
                    "headline": datasets.Value("string"),
                    "text": datasets.Value("string"),
                    "headline_text": datasets.Value("string"),
                    "url": datasets.Value("string"),
                }
            ),
            supervised_keys=None,
            homepage="https://github.com/masakhane-io/masakhane-news",
            citation=_CITATION,
        )
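
    # "label" is a ClassLabel, i.e. an integer index into the seven category
    # names above; features["label"].int2str(i) recovers the string name.
    # "headline_text" is the headline and article body joined by a single
    # space (see _generate_examples below).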

    def _split_generators(self, dl_manager):
        """Returns SplitGenerators."""
        urls_to_download = {
            "train": f"{_URL}{self.config.name}/{_TRAINING_FILE}",
            "dev": f"{_URL}{self.config.name}/{_DEV_FILE}",
            "test": f"{_URL}{self.config.name}/{_TEST_FILE}",
        }
        downloaded_files = dl_manager.download_and_extract(urls_to_download)
        return [
            datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={"filepath": downloaded_files["train"]}),
            datasets.SplitGenerator(name=datasets.Split.VALIDATION, gen_kwargs={"filepath": downloaded_files["dev"]}),
            datasets.SplitGenerator(name=datasets.Split.TEST, gen_kwargs={"filepath": downloaded_files["test"]}),
        ]
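
    # download_and_extract() returns local paths to the downloaded (cached)
    # files; since the splits are plain TSVs, no archive extraction occurs.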

    def _generate_examples(self, filepath):
        """Yields one example per row of the split's TSV file."""
        logger.info("⏳ Generating examples from = %s", filepath)
        df = pd.read_csv(filepath, sep='\t')
        N = df.shape[0]
        for id_ in range(N):
            yield id_, {
                "label": df['category'].iloc[id_],
                "headline": df['headline'].iloc[id_],
                "text": df['text'].iloc[id_],
                "headline_text": df['headline'].iloc[id_] + ' ' + df['text'].iloc[id_],
                "url": df['url'].iloc[id_],
            }
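

if __name__ == "__main__":
    # Minimal usage sketch, not part of the loading script itself. The Hub
    # path "masakhane/masakhanews" is an assumption: point load_dataset at
    # wherever this script is hosted, or at the local script path instead.
    # Newer versions of `datasets` may also require trust_remote_code=True
    # for script-based datasets.
    from datasets import load_dataset

    swa = load_dataset("masakhane/masakhanews", "swa")
    print(swa)  # DatasetDict with "train", "validation" and "test" splits

    first = swa["train"][0]
    label_feature = swa["train"].features["label"]  # ClassLabel with 7 names
    print(first["headline"], "->", label_feature.int2str(first["label"]))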