from transformers import PretrainedConfig


class AbLangConfig(PretrainedConfig):
    """Configuration for AbLang, a BERT-style antibody language model."""

    model_type = "bert"

    def __init__(
        self,
        max_position_embeddings: int = 160,
        hidden_size: int = 768,
        num_hidden_layers: int = 12,
        num_attention_heads: int = 12,
        attention_probs_dropout_prob: float = 0.1,
        intermediate_size: int = 3072,
        hidden_act: str = "gelu",
        hidden_dropout_prob: float = 0.1,
        initializer_range: float = 0.02,
        layer_norm_eps: float = 1e-12,
        chain: str = "heavy",
        **kwargs,
    ):
        # Fixed vocabulary of 24 tokens (20 amino acids plus special tokens);
        # token id 21 (ptid) is the padding token.
        self.ptid = 21
        self.vocab_size = 24
        self.max_position_embeddings = max_position_embeddings
        self.hidden_size = hidden_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.intermediate_size = intermediate_size
        self.hidden_act = hidden_act
        self.hidden_dropout_prob = hidden_dropout_prob
        self.initializer_range = initializer_range
        self.layer_norm_eps = layer_norm_eps
        # Which antibody chain the model covers: "heavy" or "light".
        self.chain = chain
        super().__init__(**kwargs)
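

# Minimal usage sketch (not part of the original file): build a light-chain
# config and round-trip it through the standard PretrainedConfig
# serialization helpers. "./ablang-light" is an arbitrary example directory.
if __name__ == "__main__":
    config = AbLangConfig(chain="light")
    config.save_pretrained("./ablang-light")  # writes config.json
    reloaded = AbLangConfig.from_pretrained("./ablang-light")
    print(reloaded.chain, reloaded.vocab_size, reloaded.max_position_embeddings)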