{
"_name_or_path": "xlm-roberta-base",
"architectures": [
"XLMRobertaForTokenClassification"
],
"attention_probs_dropout_prob": 0.1,
"bos_token_id": 0,
"classifier_dropout": null,
"eos_token_id": 2,
"finetuning_task": "pos",
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": "ART",
"1": "PWAV",
"2": "PIAT",
"3": "ADV",
"4": "KON",
"5": "VAPP",
"6": "ITJ",
"7": "$,",
"8": "PPOSAT",
"9": "VAINF",
"10": "PRELAT",
"11": "FM",
"12": "VVPP",
"13": "PWS",
"14": "VVIZU",
"15": "ADJD",
"16": "APZR",
"17": "NN",
"18": "TRUNC",
"19": "PTKA",
"20": "PROAV",
"21": "CARD",
"22": "PDS",
"23": "VMINF",
"24": "PRELS",
"25": "VVIMP",
"26": "PPOSS",
"27": "PDAT",
"28": "KOKOM",
"29": "PTKANT",
"30": "APPRART",
"31": "KOUI",
"32": "PIS",
"33": "PPER",
"34": "VVINF",
"35": "APPR",
"36": "KOUS",
"37": "PTKNEG",
"38": "PRF",
"39": "PWAT",
"40": "APPO",
"41": "$.",
"42": "$(",
"43": "PTKVZ",
"44": "VMFIN",
"45": "VMPP",
"46": "XY",
"47": "VAIMP",
"48": "ADJA",
"49": "VVFIN",
"50": "NE",
"51": "VAFIN",
"52": "PTKZU"
},
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"$(": 42,
"$,": 7,
"$.": 41,
"ADJA": 48,
"ADJD": 15,
"ADV": 3,
"APPO": 40,
"APPR": 35,
"APPRART": 30,
"APZR": 16,
"ART": 0,
"CARD": 21,
"FM": 11,
"ITJ": 6,
"KOKOM": 28,
"KON": 4,
"KOUI": 31,
"KOUS": 36,
"NE": 50,
"NN": 17,
"PDAT": 27,
"PDS": 22,
"PIAT": 2,
"PIS": 32,
"PPER": 33,
"PPOSAT": 8,
"PPOSS": 26,
"PRELAT": 10,
"PRELS": 24,
"PRF": 38,
"PROAV": 20,
"PTKA": 19,
"PTKANT": 29,
"PTKNEG": 37,
"PTKVZ": 43,
"PTKZU": 52,
"PWAT": 39,
"PWAV": 1,
"PWS": 13,
"TRUNC": 18,
"VAFIN": 51,
"VAIMP": 47,
"VAINF": 9,
"VAPP": 5,
"VMFIN": 44,
"VMINF": 23,
"VMPP": 45,
"VVFIN": 49,
"VVIMP": 25,
"VVINF": 34,
"VVIZU": 14,
"VVPP": 12,
"XY": 46
},
"layer_norm_eps": 1e-05,
"max_position_embeddings": 514,
"model_type": "xlm-roberta",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"output_past": true,
"pad_token_id": 1,
"position_embedding_type": "absolute",
"torch_dtype": "float32",
"transformers_version": "4.25.1",
"type_vocab_size": 1,
"use_cache": true,
"vocab_size": 250002
}