File size: 825 Bytes
750e21b |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 |
from transformers import PretrainedConfig
class SRLModelConfig(PretrainedConfig):
    """Configuration for a BERT-based semantic role labeling (SRL) model.

    Holds the hyperparameters needed to rebuild the model: the size of the
    label set, the name of the underlying BERT checkpoint, the dropout rate
    applied to the BERT embeddings, and the label<->id mappings.
    """

    model_type = "srl"

    def __init__(
        self,
        num_labels=0,
        bert_model_name="bert-base-uncased",
        embedding_dropout=0.0,
        label2id=None,
        id2label=None,
        **kwargs,
    ):
        """Create an SRL model configuration.

        Args:
            num_labels: Number of SRL tag classes.
            bert_model_name: Hugging Face checkpoint name of the BERT encoder.
            embedding_dropout: Dropout probability applied to BERT embeddings.
            label2id: Mapping from label string to integer id. Defaults to {}.
            id2label: Mapping from integer id to label string. Defaults to {}.
            **kwargs: Forwarded to ``PretrainedConfig.__init__``.
        """
        super().__init__(**kwargs)
        self.num_labels = num_labels
        self.bert_model_name = bert_model_name
        self.embedding_dropout = embedding_dropout
        # Use None sentinels instead of mutable `{}` defaults: a shared dict
        # default would be aliased across every instance constructed without
        # explicit mappings, so mutating one config would mutate them all.
        self.label2id = label2id if label2id is not None else {}
        self.id2label = id2label if id2label is not None else {}

    def to_dict(self):
        """Serialize the config to a plain dict, ensuring ``num_labels`` is
        present (the base class may omit attributes it treats specially)."""
        config_dict = super().to_dict()
        config_dict["num_labels"] = self.num_labels
        return config_dict
|