{ "emb_size": 1024, "feedforward_size": 4096, "hidden_size": 1024, "hidden_act": "gelu", "heads_num": 16, "layers_num": 12, "decoder_layers_num": 12, "max_seq_length": 1024, "dropout": 0.1, "data_processor": "bart", "embedding": ["word", "pos"], "tgt_embedding": ["word", "pos"], "share_embedding": true, "encoder": "transformer", "mask": "fully_visible", "decoder": "transformer", "target": ["lm"], "tie_weights": true, "has_lmtarget_bias": true }