BenCzechMark-unstable / tasks_metadata.json
idolezal's picture
Typo in tasks names
cc2dcfa
raw
history blame
10 kB
{
"benczechmark_propaganda_argumentace": {
"name": "Propaganda – Argumentace",
"source_url": "https://huggingface.co/datasets/CZLC/propaganda_argumentace",
"short_name": "P-Argumentace",
"category": "NLI",
"abbreviation": "P-ARG"
},
"benczechmark_propaganda_fabulace": {
"name": "Propaganda – Fabulace",
"source_url": "https://huggingface.co/datasets/CZLC/propaganda_fabulace",
"short_name": "P-Fabulace",
"category": "NLI",
"abbreviation": "P-FAB"
},
"benczechmark_propaganda_nazor": {
"name": "Propaganda – Názor",
"source_url": "https://huggingface.co/datasets/CZLC/propaganda_nazor",
"short_name": "P-Názor",
"category": "NLI",
"abbreviation": "P-NAZOR"
},
"benczechmark_propaganda_strach": {
"name": "Propaganda – Strach",
"source_url": "https://huggingface.co/datasets/CZLC/propaganda_strach",
"short_name": "P-Strach",
"category": "NLI",
"abbreviation": "P-STCH"
},
"benczechmark_propaganda_zamereni": {
"name": "Propaganda – Zaměření",
"source_url": "https://huggingface.co/datasets/CZLC/propaganda_zamereni",
"short_name": "P-Zaměření",
"category": "NLI",
"abbreviation": "P-MER"
},
"benczechmark_propaganda_demonizace": {
"name": "Propaganda – Démonizace",
"source_url": "https://huggingface.co/datasets/CZLC/propaganda_demonizace",
"short_name": "P-Démonizace",
"category": "NLI",
"abbreviation": "P-DEMON"
},
"benczechmark_propaganda_lokace": {
"name": "Propaganda – Lokace",
"source_url": "https://huggingface.co/datasets/CZLC/propaganda_lokace",
"short_name": "P-Lokace",
"category": "NLI",
"abbreviation": "P-LOK"
},
"benczechmark_propaganda_relativizace": {
"name": "Propaganda – Relativizace",
"source_url": "https://huggingface.co/datasets/CZLC/propaganda_relativizace",
"short_name": "P-Relativizace",
"category": "NLI",
"abbreviation": "P-REL"
},
"benczechmark_propaganda_vina": {
"name": "Propaganda – Vina",
"source_url": "https://huggingface.co/datasets/CZLC/propaganda_vina",
"short_name": "P-Vina",
"category": "NLI",
"abbreviation": "P-VINA"
},
"benczechmark_propaganda_zanr": {
"name": "Propaganda – Žánr",
"source_url": "https://huggingface.co/datasets/CZLC/propaganda_zanr",
"short_name": "P-Žánr",
"category": "NLI",
"abbreviation": "P-ZANR"
},
"benczechmark_propaganda_emoce": {
"name": "Propaganda – Emoce",
"source_url": "https://huggingface.co/datasets/CZLC/propaganda_emoce",
"short_name": "P-Emoce",
"category": "NLI",
"abbreviation": "P-EMOCE"
},
"benczechmark_propaganda_nalepkovani": {
"name": "Propaganda – Nálepkování",
"source_url": "https://huggingface.co/datasets/CZLC/propaganda_nalepkovani",
"short_name": "P-Nálepkování",
"category": "NLI",
"abbreviation": "P-LEP"
},
"benczechmark_propaganda_rusko": {
"name": "Propaganda – Rusko",
"source_url": "https://huggingface.co/datasets/CZLC/propaganda_rusko",
"short_name": "P-Rusko",
"category": "NLI",
"abbreviation": "P-RUS"
},
"benczechmark_sentiment_mall": {
"name": "CzechSentiment MALL",
"source_url": "https://huggingface.co/datasets/CZLC/mall_sentiment_balanced",
"short_name": "S-Mall",
"category": "Sentiment",
"abbreviation": "S-MALL"
},
"benczechmark_sentiment_fb": {
"name": "CzechSentiment FB",
"source_url": "https://huggingface.co/datasets/CZLC/fb_sentiment_balanced",
"short_name": "S-FB",
"category": "Sentiment",
"abbreviation": "S-FB"
},
"benczechmark_sentiment_csfd": {
"name": "CzechSentiment CSFD",
"source_url": "https://huggingface.co/datasets/CZLC/csfd_sentiment_balanced",
"short_name": "S-CSFD",
"category": "Sentiment",
"abbreviation": "S-CSFD"
},
"benczechmark_summarization": {
"name": "SUMECZECH",
"source_url": "https://huggingface.co/datasets/CZLC/sumeczech_downsampled",
"short_name": "Summarization",
"category": "Summarization",
"abbreviation": "SUM"
},
"benczechmark_grammarerrorcorrection": {
"name": "GrammarErrorCorrection",
"source_url": "https://huggingface.co/datasets/CZLC/cs_gec",
"short_name": "Grammar Error Correction",
"category": "Syntactical Reasoning",
"abbreviation": "GEC"
},
"benczechmark_cs_naturalquestions": {
"name": "NaturalQuestions-CZ",
"source_url": "https://huggingface.co/datasets/CZLC/cs_naturalquestions",
"short_name": "CS Natural Questions",
"category": "Knowledge",
"abbreviation": "NQ"
},
"benczechmark_cs_sqad32": {
"name": "SQAD3.2",
"source_url": "https://huggingface.co/datasets/CZLC/SQAD_3.2",
"short_name": "CS SQAD 3.2",
"category": "Knowledge",
"abbreviation": "SQAD32"
},
"benczechmark_cs_triviaQA": {
"name": "TriviaQA-CZ",
"source_url": "https://huggingface.co/datasets/CZLC/cs_triviaqa",
"short_name": "CS TriviaQA",
"category": "Knowledge",
"abbreviation": "TQA"
},
"benczechmark_csfever_nli": {
"name": "CSFEVER",
"source_url": "https://huggingface.co/datasets/ctu-aic/csfever_nli",
"short_name": "CSFever NLI",
"category": "NLI",
"abbreviation": "CFR"
},
"benczechmark_ctkfacts_nli": {
"name": "CTKFACTS",
"source_url": "https://huggingface.co/datasets/ctu-aic/ctkfacts_nli",
"short_name": "CTKFacts NLI",
"category": "NLI",
"abbreviation": "CTK"
},
"benczechmark_cs_ner": {
"name": "CZECH NER CORPUS 2.0",
"source_url": "https://huggingface.co/datasets/fewshot-goes-multilingual/cs_czech-named-entity-corpus_2.0",
"short_name": "CS NER",
"category": "NER",
"abbreviation": "CZNERC"
},
"benczechmark_hellaswag": {
"name": "HellaSwag-CZ",
"source_url": "https://huggingface.co/datasets/CZLC/cs_hellaswag",
"short_name": "HellaSwag",
"category": "Language Modeling",
"abbreviation": "HASG"
},
"benczechmark_histcorpus": {
"name": "Historical Corpus",
"source_url": "https://huggingface.co/datasets/CZLC/benczechmark_histcorpus",
"short_name": "HistCorpus",
"category": "Language Modeling",
"abbreviation": "HIST"
},
"benczechmark_klokan_qa": {
"name": "Klokan QA",
"source_url": "https://huggingface.co/datasets/hynky/klokan-qa",
"short_name": "Klokan QA",
"category": "Czech Math Reasoning",
"abbreviation": "KQA"
},
"benczechmark_cs_court_decisions_ner": {
"name": "Czech Court Decisions",
"source_url": "https://huggingface.co/datasets/fewshot-goes-multilingual/cs_czech-court-decisions-ner",
"short_name": "CS Court Decisions NER",
"category": "NER",
"abbreviation": "CCDNER"
},
"benczechmark_umimeto_biology": {
"name": "Umimeto.cz – Biology",
"source_url": "https://huggingface.co/datasets/CZLC/umimeto-biology",
"short_name": "Umimeto.cz – Biology",
"category": "General Reasoning",
"abbreviation": "UT-BIO"
},
"benczechmark_umimeto_chemistry": {
"name": "Umimeto.cz – Chemistry",
"source_url": "https://huggingface.co/datasets/CZLC/umimeto-chemistry",
"short_name": "Umimeto.cz – Chemistry",
"category": "General Reasoning",
"abbreviation": "UT-CHEM"
},
"benczechmark_umimeto_czech": {
"name": "Umimeto.cz – Czech Language",
"source_url": "https://huggingface.co/datasets/CZLC/umimeto-czech",
"short_name": "Umimeto.cz – Czech",
"category": "General Reasoning",
"abbreviation": "UT-CZEL"
},
"benczechmark_umimeto_history": {
"name": "Umimeto.cz – History",
"source_url": "https://huggingface.co/datasets/CZLC/umimeto-history",
"short_name": "Umimeto.cz – History",
"category": "General Reasoning",
"abbreviation": "UT-HIST"
},
"benczechmark_umimeto_informatics": {
"name": "Umimeto.cz – Informatics",
"source_url": "https://huggingface.co/datasets/CZLC/umimeto-informatics",
"short_name": "Umimeto.cz – Informatics",
"category": "General Reasoning",
"abbreviation": "UT-IT"
},
"benczechmark_umimeto_math": {
"name": "Umimeto.cz – Math",
"source_url": "https://huggingface.co/datasets/CZLC/umimeto-math",
"short_name": "Umimeto.cz – Math",
"category": "Czech Math Reasoning",
"abbreviation": "UT-MATH"
},
"benczechmark_umimeto_physics": {
"name": "Umimeto.cz – Physics",
"source_url": "https://huggingface.co/datasets/CZLC/umimeto-physics",
"short_name": "Umimeto.cz – Physics",
"category": "General Reasoning",
"abbreviation": "UT-PHYS"
},
"benczechmark_cermat_czmath_mc": {
"name": "CERMAT – Czech Math – MC",
"source_url": "https://huggingface.co/datasets/CZLC/cermat_math_mc",
"short_name": "Cermat Czech Math MC",
"category": "Czech Math Reasoning",
"abbreviation": "CCM-MC"
},
"benczechmark_cermat_czmath_open": {
"name": "CERMAT – Czech Math – OPEN",
"source_url": "https://huggingface.co/datasets/CZLC/cermat_math_open",
"short_name": "Cermat Czech Math Open",
"category": "Czech Math Reasoning",
"abbreviation": "CCM-OPEN"
},
"benczechmark_cermat_czech_tf": {
"name": "CERMAT – Czech Language – TF",
"source_url": "https://huggingface.co/datasets/CZLC/cermat_czech_tf",
"short_name": "Cermat Czech Language TF",
"category": "General Reasoning",
"abbreviation": "CCL-TF"
},
"benczechmark_cermat_czech_mc": {
"name": "CERMAT – Czech Language – MC",
"source_url": "https://huggingface.co/datasets/CZLC/cermat_czech_mc",
"short_name": "Cermat Czech Language MC",
"category": "General Reasoning",
"abbreviation": "CCL-MC"
},
"benczechmark_cermat_czech_open": {
"name": "CERMAT – Czech Language – OPEN",
"source_url": "https://huggingface.co/datasets/CZLC/cermat_czech_open",
"short_name": "Cermat Czech Language Open",
"category": "General Reasoning",
"abbreviation": "CCL-OPEN"
}
}