{
  "emb_size": 128,
  "hidden_size": 807,
  "layers_num": 9,
  "kernel_size": 4,
  "block_size": 2,
  "max_seq_length": 1024,
  "dropout": 0.1,
  "data_processor": "lm",
  "embedding": ["word"],
  "remove_embedding_layernorm": true,
  "encoder": "gatedcnn",
  "target": ["lm"]
}