mamba2-780m / config.json
{
"d_model": 1536,
"d_intermediate": 0,
"n_layer": 48,
"vocab_size": 50277,
"ssm_cfg": {
"layer": "Mamba2"
},
"attn_layer_idx": [],
"attn_cfg": {},
"rms_norm": true,
"residual_in_fp32": true,
"fused_add_norm": true,
"pad_vocab_size_multiple": 16,
"tie_embeddings": true
}
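
These keys line up one-to-one with the fields of the MambaConfig dataclass in the state-spaces/mamba codebase, so the file can be passed straight into model construction. Below is a minimal sketch, assuming the mamba_ssm package is installed and the JSON above is saved locally as config.json (the local filename is an assumption for illustration); weights would still need to be loaded separately.

import json

from mamba_ssm.models.config_mamba import MambaConfig
from mamba_ssm.models.mixer_seq_simple import MambaLMHeadModel

# Parse the JSON above; its keys map directly onto MambaConfig's fields
# (d_model, n_layer, ssm_cfg, tie_embeddings, ...).
with open("config.json") as f:  # assumed local path
    cfg = MambaConfig(**json.load(f))

# Build the backbone + LM head described by the config.
# ssm_cfg["layer"] = "Mamba2" selects the Mamba-2 mixer block.
model = MambaLMHeadModel(cfg)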