{ "_name_or_path": "Nhanvi282/LLaVA_Qwen2.5_0.5b_SigLIPStage2", "architectures": [ "MoELLaVAQwen2ForCausalLM" ], "attention_dropout": 0.0, "bos_token_id": 151643, "eos_token_id": 151643, "freeze_mm_mlp_adapter": false, "hidden_act": "silu", "hidden_size": 896, "image_aspect_ratio": "pad", "image_projector_type": "mlp2x_gelu", "initializer_range": 0.02, "intermediate_size": 4864, "lora": {}, "max_position_embeddings": 32768, "max_window_layers": 24, "mm_hidden_size": 768, "mm_image_tower": "google/siglip-base-patch16-256-multilingual", "mm_projector_lr": null, "mm_use_im_patch_token": false, "mm_use_im_start_end": false, "mm_video_tower": null, "mm_vision_select_feature": "patch", "mm_vision_select_layer": -2, "model_type": "moe_llava_qwen2", "moe": { "capacity_factor": 1.5, "ep_size": 1, "eval_capacity_factor": 2.0, "min_capacity": 0, "moe_enable": true, "moe_layers_idx": [ 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22 ], "moe_mode": "sparse", "num_experts": [ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4 ], "router_aux_loss_coef": 0.01, "top_k_experts": 2, "train_modules": [ "mlp.gate_proj", "mlp.up_proj", "mlp.down_proj", "wg" ], "use_residual": false }, "num_attention_heads": 14, "num_hidden_layers": 24, "num_key_value_heads": 2, "pad_token_id": 151643, "rms_norm_eps": 1e-06, "rope_theta": 1000000.0, "sliding_window": 32768, "tie_word_embeddings": true, "tokenizer_padding_side": "right", "torch_dtype": "float16", "transformers_version": "4.37.0", "tune_mm_mlp_adapter": false, "use_cache": true, "use_mm_proj": true, "use_mrope": false, "use_sliding_window": false, "video_global_proj": false, "video_projector_type": "linear", "video_spatial_proj": false, "video_temproal_proj": false, "vocab_size": 151936 }