{ "_name_or_path": "ufal/robeczech-base", "architectures": [ "RobertaForTokenClassification" ], "attention_probs_dropout_prob": 0.2, "bos_token_id": 0, "classifier_dropout": null, "eos_token_id": 2, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.3, "hidden_size": 768, "id2label": { "0": "O", "1": "B-ah", "2": "I-ah", "3": "B-at", "4": "I-at", "5": "B-az", "6": "I-az", "7": "B-g_", "8": "I-g_", "9": "B-gc", "10": "I-gc", "11": "B-gh", "12": "I-gh", "13": "B-gl", "14": "I-gl", "15": "B-gp", "16": "I-gp", "17": "B-gq", "18": "I-gq", "19": "B-gr", "20": "I-gr", "21": "B-gs", "22": "I-gs", "23": "B-gt", "24": "I-gt", "25": "B-gu", "26": "I-gu", "27": "B-i_", "28": "I-i_", "29": "B-ia", "30": "I-ia", "31": "B-ic", "32": "I-ic", "33": "B-if", "34": "I-if", "35": "B-io", "36": "I-io", "37": "B-mn", "38": "I-mn", "39": "B-mt", "40": "I-mt", "41": "B-mr", "42": "I-mr", "43": "B-o_", "44": "I-o_", "45": "B-oa", "46": "I-oa", "47": "B-oc", "48": "I-oc", "49": "B-oe", "50": "I-oe", "51": "B-om", "52": "I-om", "53": "B-op", "54": "I-op", "55": "B-or", "56": "I-or", "57": "B-p_", "58": "I-p_", "59": "B-pb", "60": "I-pb", "61": "B-pc", "62": "I-pc", "63": "B-pd", "64": "I-pd", "65": "B-pf", "66": "I-pf", "67": "B-pm", "68": "I-pm", "69": "B-pp", "70": "I-pp", "71": "B-ps", "72": "I-ps", "73": "B-td", "74": "I-td", "75": "B-tf", "76": "I-tf", "77": "B-th", "78": "I-th", "79": "B-ti", "80": "I-ti", "81": "B-tm", "82": "I-tm", "83": "B-ty", "84": "I-ty" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "B-ah": 1, "B-at": 3, "B-az": 5, "B-g_": 7, "B-gc": 9, "B-gh": 11, "B-gl": 13, "B-gp": 15, "B-gq": 17, "B-gr": 19, "B-gs": 21, "B-gt": 23, "B-gu": 25, "B-i_": 27, "B-ia": 29, "B-ic": 31, "B-if": 33, "B-io": 35, "B-mn": 37, "B-mr": 41, "B-mt": 39, "B-o_": 43, "B-oa": 45, "B-oc": 47, "B-oe": 49, "B-om": 51, "B-op": 53, "B-or": 55, "B-p_": 57, "B-pb": 59, "B-pc": 61, "B-pd": 63, "B-pf": 65, "B-pm": 67, "B-pp": 69, "B-ps": 71, "B-td": 73, "B-tf": 75, "B-th": 77, "B-ti": 79, "B-tm": 81, "B-ty": 83, "I-ah": 2, "I-at": 4, "I-az": 6, "I-g_": 8, "I-gc": 10, "I-gh": 12, "I-gl": 14, "I-gp": 16, "I-gq": 18, "I-gr": 20, "I-gs": 22, "I-gt": 24, "I-gu": 26, "I-i_": 28, "I-ia": 30, "I-ic": 32, "I-if": 34, "I-io": 36, "I-mn": 38, "I-mr": 42, "I-mt": 40, "I-o_": 44, "I-oa": 46, "I-oc": 48, "I-oe": 50, "I-om": 52, "I-op": 54, "I-or": 56, "I-p_": 58, "I-pb": 60, "I-pc": 62, "I-pd": 64, "I-pf": 66, "I-pm": 68, "I-pp": 70, "I-ps": 72, "I-td": 74, "I-tf": 76, "I-th": 78, "I-ti": 80, "I-tm": 82, "I-ty": 84, "O": 0 }, "layer_norm_eps": 1e-05, "max_position_embeddings": 514, "model_type": "roberta", "num_attention_heads": 12, "num_hidden_layers": 12, "pad_token_id": 1, "position_embedding_type": "absolute", "torch_dtype": "float32", "transformers_version": "4.36.2", "type_vocab_size": 1, "use_cache": true, "vocab_size": 51997 }