albertvillanova (HF staff) committed
Commit c9d77dd
Parent: 028b279

Delete legacy JSON metadata


Delete legacy `dataset_infos.json`.

Files changed (1)
  1. dataset_infos.json +0 -1
dataset_infos.json DELETED
@@ -1 +0,0 @@
- {"default": {"description": "\nThe Arabic Sentiment Twitter Dataset for Levantine dialect (ArSenTD-LEV) contains 4,000 tweets written in Arabic and equally retrieved from Jordan, Lebanon, Palestine and Syria.\n", "citation": "\n@article{ArSenTDLev2018,\ntitle={ArSentD-LEV: A Multi-Topic Corpus for Target-based Sentiment Analysis in Arabic Levantine Tweets},\nauthor={Baly, Ramy, and Khaddaj, Alaa and Hajj, Hazem and El-Hajj, Wassim and Bashir Shaban, Khaled},\njournal={OSACT3},\npages={},\nyear={2018}}\n", "homepage": "http://oma-project.com/ArSenL/ArSenTD_Lev_Intro", "license": "", "features": {"Tweet": {"dtype": "string", "id": null, "_type": "Value"}, "Country": {"num_classes": 4, "names": ["jordan", "lebanon", "syria", "palestine"], "names_file": null, "id": null, "_type": "ClassLabel"}, "Topic": {"dtype": "string", "id": null, "_type": "Value"}, "Sentiment": {"num_classes": 5, "names": ["negative", "neutral", "positive", "very_negative", "very_positive"], "names_file": null, "id": null, "_type": "ClassLabel"}, "Sentiment_Expression": {"num_classes": 3, "names": ["explicit", "implicit", "none"], "names_file": null, "id": null, "_type": "ClassLabel"}, "Sentiment_Target": {"dtype": "string", "id": null, "_type": "Value"}}, "post_processed": null, "supervised_keys": null, "builder_name": "arsentd_lev", "config_name": "default", "version": {"version_str": "1.1.0", "description": null, "major": 1, "minor": 1, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 1233980, "num_examples": 4000, "dataset_name": "arsentd_lev"}}, "download_checksums": {"http://oma-project.com/ArSenL/ArSenTD-LEV.zip": {"num_bytes": 392666, "checksum": "399d03bf6e8eb50415355132bc6742b2d7a9728070f6f789d705616fd12189c3"}}, "download_size": 392666, "post_processing_size": null, "dataset_size": 1233980, "size_in_bytes": 1626646}}