brokorli_ner / config.json
{
"_name_or_path": "brokorli/brokorli_ner",
"_num_labels": 62,
"architectures": [
"ElectraForTokenClassification"
],
"attention_probs_dropout_prob": 0.1,
"classifier_dropout": null,
"embedding_size": 768,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": "E-AM",
"1": "I-TR",
"2": "S-QT",
"3": "I-AM",
"4": "B-FD",
"5": "I-MT",
"6": "B-DT",
"7": "I-QT",
"8": "I-PS",
"9": "S-OG",
"10": "S-EV",
"11": "S-PT",
"12": "S-DT",
"13": "[PAD]",
"14": "B-EV",
"15": "B-TM",
"16": "S-TI",
"17": "I-TM",
"18": "E-PS",
"19": "E-TR",
"20": "E-MT",
"21": "I-EV",
"22": "S-TM",
"23": "I-AF",
"24": "B-OG",
"25": "E-FD",
"26": "E-TI",
"27": "I-FD",
"28": "B-LC",
"29": "E-OG",
"30": "B-PT",
"31": "S-TR",
"32": "S-CV",
"33": "I-DT",
"34": "I-OG",
"35": "E-DT",
"36": "E-CV",
"37": "I-LC",
"38": "I-CV",
"39": "B-MT",
"40": "I-PT",
"41": "E-EV",
"42": "B-CV",
"43": "B-AM",
"44": "I-TI",
"45": "S-AM",
"46": "E-TM",
"47": "B-TR",
"48": "S-FD",
"49": "E-LC",
"50": "B-PS",
"51": "E-QT",
"52": "B-AF",
"53": "E-PT",
"54": "O",
"55": "E-AF",
"56": "S-AF",
"57": "S-MT",
"58": "B-TI",
"59": "S-PS",
"60": "S-LC",
"61": "B-QT"
},
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"B-AF": 52,
"B-AM": 43,
"B-CV": 42,
"B-DT": 6,
"B-EV": 14,
"B-FD": 4,
"B-LC": 28,
"B-MT": 39,
"B-OG": 24,
"B-PS": 50,
"B-PT": 30,
"B-QT": 61,
"B-TI": 58,
"B-TM": 15,
"B-TR": 47,
"E-AF": 55,
"E-AM": 0,
"E-CV": 36,
"E-DT": 35,
"E-EV": 41,
"E-FD": 25,
"E-LC": 49,
"E-MT": 20,
"E-OG": 29,
"E-PS": 18,
"E-PT": 53,
"E-QT": 51,
"E-TI": 26,
"E-TM": 46,
"E-TR": 19,
"I-AF": 23,
"I-AM": 3,
"I-CV": 38,
"I-DT": 33,
"I-EV": 21,
"I-FD": 27,
"I-LC": 37,
"I-MT": 5,
"I-OG": 34,
"I-PS": 8,
"I-PT": 40,
"I-QT": 7,
"I-TI": 44,
"I-TM": 17,
"I-TR": 1,
"O": 54,
"S-AF": 56,
"S-AM": 45,
"S-CV": 32,
"S-DT": 12,
"S-EV": 10,
"S-FD": 48,
"S-LC": 60,
"S-MT": 57,
"S-OG": 9,
"S-PS": 59,
"S-PT": 11,
"S-QT": 2,
"S-TI": 16,
"S-TM": 22,
"S-TR": 31,
"[PAD]": 13
},
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"model_type": "electra",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 0,
"position_embedding_type": "absolute",
"summary_activation": "gelu",
"summary_last_dropout": 0.1,
"summary_type": "first",
"summary_use_proj": true,
"torch_dtype": "float32",
"transformers_version": "4.19.2",
"type_vocab_size": 2,
"use_cache": true,
"vocab_size": 35000
}
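
Note: the JSON above is the complete configuration file; what follows is a minimal usage sketch, not part of the config itself. It assumes the Hugging Face transformers library and loads the model via the "_name_or_path" value ("brokorli/brokorli_ner") given above; the input sentence and variable names are illustrative placeholders. It shows how the id2label table maps each predicted class id back to one of the 62 tags, which appear to follow a BIOES-style scheme (B-/I-/E-/S- prefixes plus O and [PAD]).

# Minimal sketch, assuming the transformers and torch packages are installed.
from transformers import AutoTokenizer, AutoModelForTokenClassification
import torch

model_name = "brokorli/brokorli_ner"  # taken from "_name_or_path" in the config above
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForTokenClassification.from_pretrained(model_name)  # ElectraForTokenClassification

text = "example input sentence"  # illustrative placeholder
inputs = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, sequence_length, 62)

# Map each token's highest-scoring class id back to its tag via the
# id2label table defined in this config.
predicted_ids = logits.argmax(dim=-1)[0].tolist()
tags = [model.config.id2label[i] for i in predicted_ids]
tokens = tokenizer.convert_ids_to_tokens(inputs["input_ids"][0])
print(list(zip(tokens, tags)))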