{
  "activation_dropout": 0.1,
  "activation_function": "gelu",
  "architectures": [
    "MBartForConditionalGeneration"
  ],
  "attention_dropout": 0.1,
  "bos_token_id": 64000,
  "d_model": 1024,
  "classifier_dropout": 0.0,
  "decoder_attention_heads": 16,
  "decoder_ffn_dim": 4096,
  "decoder_layerdrop": 0.0,
  "decoder_layers": 6,
  "dropout": 0.1,
  "encoder_attention_heads": 16,
  "encoder_ffn_dim": 4096,
  "encoder_layerdrop": 0.0,
  "encoder_layers": 6,
  "eos_token_id": 64001,
  "gradient_checkpointing": false,
  "init_std": 0.02,
  "is_encoder_decoder": true,
  "max_position_embeddings": 1024,
  "model_type": "mbart",
  "num_hidden_layers": 6,
  "pad_token_id": 0,
  "scale_embedding": false,
  "transformers_version": "4.3.2",
  "use_cache": true,
  "vocab_size": 64014,
  "tokenizer_class": "AlbertTokenizer"
}
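
# A minimal sketch of consuming this config with Hugging Face transformers.
# Assumption for illustration only: the JSON above is saved as config.json
# in a local directory ./model (the path is not part of the original file).

from transformers import MBartConfig, MBartForConditionalGeneration

# Parse config.json; MBartConfig maps each JSON key to an attribute.
config = MBartConfig.from_pretrained("./model")

# Instantiate the architecture named under "architectures" with randomly
# initialized weights (no checkpoint is loaded, only the model skeleton).
model = MBartForConditionalGeneration(config)

print(config.vocab_size)                              # 64014
print(config.encoder_layers, config.decoder_layers)   # 6 6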