{ "_name_or_path": "FacebookAI/xlm-roberta-large", "architectures": [ "XLMRobertaForTokenClassification" ], "attention_probs_dropout_prob": 0.1, "bos_token_id": 0, "classifier_dropout": null, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 1024, "id2label": { "0": "O", "1": "Lo\u1ea1i da", "2": "D\u00e1ng khu\u00f4n m\u1eb7t", "3": "Chi\u1ec1u cao kh\u00e1ch h\u00e0ng", "4": "C\u00e2n n\u1eb7ng kh\u00e1ch h\u00e0ng", "5": "D\u00e1ng ng\u01b0\u1eddi", "6": "M\u00e0u da", "7": "\u0110\u1eb7c \u0111i\u1ec3m kh\u00e1c c\u1ee7a da" }, "initializer_range": 0.02, "intermediate_size": 4096, "label2id": { "Chi\u1ec1u cao kh\u00e1ch h\u00e0ng": 3, "C\u00e2n n\u1eb7ng kh\u00e1ch h\u00e0ng": 4, "D\u00e1ng khu\u00f4n m\u1eb7t": 2, "D\u00e1ng ng\u01b0\u1eddi": 5, "Lo\u1ea1i da": 1, "M\u00e0u da": 6, "O": 0, "\u0110\u1eb7c \u0111i\u1ec3m kh\u00e1c c\u1ee7a da": 7 }, "layer_norm_eps": 1e-05, "max_position_embeddings": 514, "model_type": "xlm-roberta", "num_attention_heads": 16, "num_hidden_layers": 24, "output_past": true, "pad_token_id": 1, "position_embedding_type": "absolute", "torch_dtype": "float32", "transformers_version": "4.44.0", "type_vocab_size": 1, "use_cache": true, "vocab_size": 250002 }