{
  "_name_or_path": "vinai/phobert-large",
  "architectures": [
    "RobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 1024,
  "id2label": {
    "0": "O",
    "1": "Ti\u1ec1n c\u1ee5 th\u1ec3",
    "2": "M\u00e3 \u0111\u01a1n",
    "3": "S\u1ed1 c\u00f4ng",
    "4": "V\u1ecb tr\u00ed",
    "5": "S\u1ed1 \u0111i\u1ec3m",
    "6": "C\u00f4ng",
    "7": "L\u01b0\u01a1ng",
    "8": "Ti\u1ec1n tr\u1eebu t\u01b0\u1ee3ng",
    "9": "S\u1ed1 \u0111\u01a1n",
    "10": "Th\u00e1ng tr\u1eebu t\u01b0\u1ee3ng",
    "11": "Th\u1ee9",
    "12": "Ph\u1ee5 c\u1ea5p",
    "13": "S\u1ed1 gi\u1edd",
    "14": "Kho\u1ea3ng th\u1eddi gian",
    "15": "Th\u00f4ng tin CTT",
    "16": "Kho",
    "17": "H\u00ecnh th\u1ee9c l\u00e0m vi\u1ec7c",
    "18": "Gi\u1ea5y t\u1edd",
    "19": "\u0110\u1ee3t",
    "20": "T\u1ef7 l\u1ec7",
    "21": "M\u00e3 s\u1ed1 thu\u1ebf"
  },
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "label2id": {
    "C\u00f4ng": 6,
    "Gi\u1ea5y t\u1edd": 18,
    "H\u00ecnh th\u1ee9c l\u00e0m vi\u1ec7c": 17,
    "Kho": 16,
    "Kho\u1ea3ng th\u1eddi gian": 14,
    "L\u01b0\u01a1ng": 7,
    "M\u00e3 s\u1ed1 thu\u1ebf": 21,
    "M\u00e3 \u0111\u01a1n": 2,
    "O": 0,
    "Ph\u1ee5 c\u1ea5p": 12,
    "S\u1ed1 c\u00f4ng": 3,
    "S\u1ed1 gi\u1edd": 13,
    "S\u1ed1 \u0111i\u1ec3m": 5,
    "S\u1ed1 \u0111\u01a1n": 9,
    "Th\u00e1ng tr\u1eebu t\u01b0\u1ee3ng": 10,
    "Th\u00f4ng tin CTT": 15,
    "Th\u1ee9": 11,
    "Ti\u1ec1n c\u1ee5 th\u1ec3": 1,
    "Ti\u1ec1n tr\u1eebu t\u01b0\u1ee3ng": 8,
    "T\u1ef7 l\u1ec7": 20,
    "V\u1ecb tr\u00ed": 4,
    "\u0110\u1ee3t": 19
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 258,
  "model_type": "roberta",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "tokenizer_class": "PhobertTokenizer",
  "torch_dtype": "float32",
  "transformers_version": "4.44.2",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 64001
}