VISOR-GPT/train/models/pegasus/large_config.json
{
"emb_size": 1024,
"feedforward_size": 4096,
"hidden_size": 1024,
"hidden_act": "relu",
"heads_num": 16,
"layers_num": 16,
"decoder_layers_num": 16,
"max_seq_length": 1024,
"dropout": 0.1,
"data_processor": "gsg",
"embedding": ["word", "sinusoidalpos"],
"remove_embedding_layernorm": true,
"tgt_embedding": ["word", "sinusoidalpos"],
"share_embedding": true,
"encoder": "transformer",
"mask": "fully_visible",
"layernorm_positioning": "pre",
"decoder": "transformer",
"target": ["lm"],
"has_lmtarget_bias": true,
"tie_weights": true
}
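
For reference, below is a minimal sketch of how a training script might load and sanity-check this config. The load_pegasus_config helper, the relative path, and the checks are illustrative assumptions, not part of VISOR-GPT or its training framework.

import json

def load_pegasus_config(path: str) -> dict:
    """Load the PEGASUS hyperparameter JSON and sanity-check a few fields.

    Hypothetical helper for illustration; not from the VISOR-GPT codebase.
    """
    with open(path, "r", encoding="utf-8") as f:
        config = json.load(f)

    # hidden_size must divide evenly across attention heads.
    assert config["hidden_size"] % config["heads_num"] == 0

    # With share_embedding enabled, encoder and decoder embedding specs should match.
    if config.get("share_embedding"):
        assert config["embedding"] == config["tgt_embedding"]

    return config

if __name__ == "__main__":
    cfg = load_pegasus_config("models/pegasus/large_config.json")
    print(f"{cfg['layers_num']} encoder / {cfg['decoder_layers_num']} decoder layers, "
          f"hidden size {cfg['hidden_size']}, {cfg['heads_num']} attention heads")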