{ "_name_or_path": "zhibinlu/vgcn-distilbert-base-uncased", "activation": "gelu", "architectures": [ "VGCNBertModel" ], "attention_dropout": 0.1, "auto_map": { "AutoConfig": "configuration_vgcn_bert.VGCNBertConfig", "AutoModel": "modeling_vgcn_bert.VGCNBertModel", "AutoModelForMaskedLM": "modeling_vgcn_bert.VGCNBertForMaskedLM", "AutoModelForSequenceClassification": "modeling_vgcn_bert.VGCNBertForSequenceClassification", "AutoModelForQuestionAnswering": "modeling_vgcn_bert.VGCNBertForQuestionAnswering", "AutoModelForTokenClassification": "modeling_vgcn_bert.VGCNBertForTokenClassification", "AutoModelForMultipleChoice": "modeling_vgcn_bert.VGCNBertForMultipleChoice" }, "dim": 768, "dropout": 0.1, "hidden_dim": 3072, "initializer_range": 0.02, "max_position_embeddings": 512, "model_type": "vgcn-bert", "n_heads": 12, "n_layers": 6, "pad_token_id": 0, "qa_dropout": 0.1, "seq_classif_dropout": 0.2, "sinusoidal_pos_embds": false, "tie_weights_": true, "torch_dtype": "float32", "transformers_version": "4.30", "vgcn_activation": null, "vgcn_dropout": 0.1, "vgcn_graph_embds_dim": 16, "vgcn_hidden_dim": 128, "vgcn_weight_init_mode": "transparent", "vocab_size": 30522 }