ssm-100m / config.json
{
  "model_type": "mamba",
  "d_model": 768,
  "d_intermediate": 0,
  "n_layer": 12,
  "vocab_size": 1880,
  "ssm_cfg": {
    "layer": "Mamba2"
  },
  "attn_layer_idx": [],
  "attn_cfg": {},
  "rms_norm": true,
  "residual_in_fp32": true,
  "fused_add_norm": true,
  "pad_vocab_size_multiple": 16,
  "tie_embeddings": true,
  "dropout_rate": 0.1,
  "eos_token_id": 2,
  "bos_token_id": 1,
  "pad_token_id": 3,
  "num_labels": 1,
  "summary_type": "cls_index",
  "summary_use_proj": true,
  "summary_activation": "tanh",
  "summary_proj_to_labels": true,
  "summary_first_dropout": 0.4,
  "summary_hidden_size": 128
}
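
Below is a minimal sketch of instantiating this backbone with the mamba_ssm package (https://github.com/state-spaces/mamba), whose MambaConfig dataclass defines the core fields above (d_model, n_layer, ssm_cfg, and so on). It assumes mamba_ssm and PyTorch are installed; the remaining keys (token ids, dropout_rate, the summary_* settings) appear to belong to the surrounding training code rather than MambaConfig itself, so the sketch filters them out before constructing the dataclass.

import dataclasses
import json

from mamba_ssm.models.config_mamba import MambaConfig
from mamba_ssm.models.mixer_seq_simple import MambaLMHeadModel

with open("config.json") as f:
    raw = json.load(f)

# MambaConfig is a plain dataclass; passing unknown keys raises a TypeError,
# so keep only the fields it actually declares.
known = {f.name for f in dataclasses.fields(MambaConfig)}
cfg = MambaConfig(**{k: v for k, v in raw.items() if k in known})

# "layer": "Mamba2" in ssm_cfg selects Mamba-2 mixer blocks, and
# d_intermediate = 0 means no MLP between mixers. pad_vocab_size_multiple = 16
# pads the embedding table from 1880 to 1888 rows.
model = MambaLMHeadModel(cfg)
print(sum(p.numel() for p in model.parameters()))

The summary_* keys match the SequenceSummary head options used in Hugging Face transformers (cls_index pooling, a tanh-activated projection to num_labels = 1), which suggests the checkpoint is also used with a sequence-level head; summary_hidden_size is not a standard transformers field, so it is presumably consumed by custom code.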