{
  "_name_or_path": "state-spaces/mamba-2.8b-hf",
  "architectures": [
    "MambaForCausalLM"
  ],
  "bos_token_id": 0,
  "conv_kernel": 4,
  "eos_token_id": 0,
  "expand": 2,
  "fused_add_norm": true,
  "hidden_act": "silu",
  "hidden_size": 2560,
  "initializer_range": 0.1,
  "intermediate_size": 5120,
  "layer_norm_epsilon": 1e-05,
  "model_type": "mamba",
  "n_layer": 64,
  "num_hidden_layers": 64,
  "pad_token_id": 0,
  "pad_vocab_size_multiple": 8,
  "rescale_prenorm_residual": false,
  "residual_in_fp32": true,
  "rms_norm": true,
  "state_size": 16,
  "time_step_floor": 0.0001,
  "time_step_init_scheme": "random",
  "time_step_max": 0.1,
  "time_step_min": 0.001,
  "time_step_rank": 160,
  "time_step_scale": 1.0,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.39.0.dev0",
  "use_bias": false,
  "use_cache": true,
  "use_conv_bias": true,
  "vocab_size": 50280
}
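
For reference, a minimal sketch of how a configuration like this can be loaded and inspected with the `transformers` library (assuming `transformers` >= 4.39 with Mamba support and access to the Hugging Face Hub; the model id is the `_name_or_path` field above):

```python
# Minimal sketch: load the Mamba config shown above and check a few derived sizes.
from transformers import MambaConfig, MambaForCausalLM

# Fetch the exact configuration from the Hub.
config = MambaConfig.from_pretrained("state-spaces/mamba-2.8b-hf")

# intermediate_size is expand * hidden_size (2 * 2560 = 5120),
# and time_step_rank here equals ceil(hidden_size / 16) = 160.
print(config.hidden_size, config.intermediate_size, config.time_step_rank)

# Build a randomly initialised model with this architecture; use
# MambaForCausalLM.from_pretrained(...) instead to also download the weights.
model = MambaForCausalLM(config)
print(f"{sum(p.numel() for p in model.parameters()):,} parameters")
```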