{ "_name_or_path": "fla-hub/gsa-7B-mistral-100B", "architectures": [ "GSAForCausalLM" ], "bos_token_id": 1, "clamp_max": null, "clamp_min": null, "conv_size": 4, "elementwise_affine": true, "eos_token_id": 2, "expand_k": 1, "expand_v": 1, "feature_map": "swish", "fuse_cross_entropy": true, "fuse_norm": true, "gate_logit_normalizer": 8, "gate_low_rank_dim": null, "hidden_act": "swish", "hidden_ratio": 4, "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 32768, "model_type": "gsa", "norm_eps": 1e-05, "norm_first": true, "num_heads": 32, "num_hidden_layers": 32, "num_kv_heads": 8, "num_slots": 64, "pad_token_id": 2, "share_conv_kernel": true, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "transformers_version": "4.41.2", "use_cache": true, "use_norm": true, "use_output_gate": false, "use_rope": false, "use_short_conv": false, "vocab_size": 32000 }