{
  "_name_or_path": "distilbert-base-uncased",
  "activation": "gelu",
  "architectures": [
    "DistilBertForTokenClassification"
  ],
  "attention_dropout": 0.1,
  "dim": 768,
  "dropout": 0.1,
  "hidden_dim": 3072,
  "id2label": {
    "0": 0,
    "1": 19,
    "2": 20,
    "3": 7,
    "4": 8,
    "5": 25,
    "6": 26,
    "7": 23,
    "8": 24,
    "9": 27,
    "10": 21,
    "11": 1,
    "12": 2,
    "13": 13,
    "14": 14,
    "15": 28,
    "16": 11,
    "17": 15,
    "18": 16,
    "19": 12,
    "20": 3,
    "21": 4,
    "22": 5,
    "23": 22,
    "24": 9,
    "25": 17,
    "26": 18,
    "27": 6,
    "28": 10
  },
  "initializer_range": 0.02,
  "label2id": {
    "0": 0,
    "1": 11,
    "2": 12,
    "3": 20,
    "4": 21,
    "5": 22,
    "6": 27,
    "7": 3,
    "8": 4,
    "9": 24,
    "10": 28,
    "11": 16,
    "12": 19,
    "13": 13,
    "14": 14,
    "15": 17,
    "16": 18,
    "17": 25,
    "18": 26,
    "19": 1,
    "20": 2,
    "21": 10,
    "22": 23,
    "23": 7,
    "24": 8,
    "25": 5,
    "26": 6,
    "27": 9,
    "28": 15
  },
  "max_position_embeddings": 512,
  "model_type": "distilbert",
  "n_heads": 12,
  "n_layers": 6,
  "pad_token_id": 0,
  "qa_dropout": 0.1,
  "seq_classif_dropout": 0.2,
  "sinusoidal_pos_embds": false,
  "tie_weights_": true,
  "transformers_version": "4.20.1",
  "vocab_size": 30522
}