{
  "_name_or_path": "bstds/id-roberta-ner",
  "architectures": [
    "XLMRobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 1024,
  "id2label": {
    "0": "B-CRD",
    "1": "B-DAT",
    "2": "B-EVT",
    "3": "B-FAC",
    "4": "B-GPE",
    "5": "B-LAN",
    "6": "B-LAW",
    "7": "B-LOC",
    "8": "B-MON",
    "9": "B-NOR",
    "10": "B-ORD",
    "11": "B-ORG",
    "12": "B-PER",
    "13": "B-PRC",
    "14": "B-PRD",
    "15": "B-QTY",
    "16": "B-REG",
    "17": "B-TIM",
    "18": "B-WOA",
    "19": "I-CRD",
    "20": "I-DAT",
    "21": "I-EVT",
    "22": "I-FAC",
    "23": "I-GPE",
    "24": "I-LAN",
    "25": "I-LAW",
    "26": "I-LOC",
    "27": "I-MON",
    "28": "I-NOR",
    "29": "I-ORD",
    "30": "I-ORG",
    "31": "I-PER",
    "32": "I-PRC",
    "33": "I-PRD",
    "34": "I-QTY",
    "35": "I-REG",
    "36": "I-TIM",
    "37": "I-WOA",
    "38": "O"
  },
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "label2id": {
    "B-CRD": "0",
    "B-DAT": "1",
    "B-EVT": "2",
    "B-FAC": "3",
    "B-GPE": "4",
    "B-LAN": "5",
    "B-LAW": "6",
    "B-LOC": "7",
    "B-MON": "8",
    "B-NOR": "9",
    "B-ORD": "10",
    "B-ORG": "11",
    "B-PER": "12",
    "B-PRC": "13",
    "B-PRD": "14",
    "B-QTY": "15",
    "B-REG": "16",
    "B-TIM": "17",
    "B-WOA": "18",
    "I-CRD": "19",
    "I-DAT": "20",
    "I-EVT": "21",
    "I-FAC": "22",
    "I-GPE": "23",
    "I-LAN": "24",
    "I-LAW": "25",
    "I-LOC": "26",
    "I-MON": "27",
    "I-NOR": "28",
    "I-ORD": "29",
    "I-ORG": "30",
    "I-PER": "31",
    "I-PRC": "32",
    "I-PRD": "33",
    "I-QTY": "34",
    "I-REG": "35",
    "I-TIM": "36",
    "I-WOA": "37",
    "O": "38"
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "xlm-roberta",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "output_past": true,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.26.1",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 250002
}
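
This configuration describes an XLM-RoBERTa-large token-classification head with 39 BIO labels (19 entity types plus "O"). Since only the config is shown here, the sketch below assumes the full checkpoint (weights and tokenizer) is published under the same "bstds/id-roberta-ner" repo id on the Hugging Face Hub; the example Indonesian sentence is purely illustrative.

# Minimal sketch, assuming the full model is available under "bstds/id-roberta-ner".
from transformers import AutoConfig, pipeline

repo_id = "bstds/id-roberta-ner"

# Inspect the label inventory declared in the config above (39 BIO tags).
config = AutoConfig.from_pretrained(repo_id)
print(len(config.id2label), "labels, e.g.", config.id2label[12])  # 39 labels, e.g. B-PER

# Token-classification pipeline; "simple" aggregation merges B-/I- pieces
# of the same entity into a single span.
ner = pipeline("token-classification", model=repo_id, aggregation_strategy="simple")

# Hypothetical input sentence for illustration only.
for entity in ner("Joko Widodo mengunjungi Jakarta pada 17 Agustus 2023."):
    print(entity["entity_group"], entity["word"], round(entity["score"], 3))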
|