llava_15_siglip / config.json
{
  "_name_or_path": "/leonardo_scratch/fast/FBKLM_prj1/PROJECTS/llava-1.5-hf-siglip/ckpts/siglip-llava15_PT/checkpoint-8600",
  "architectures": [
    "LlavaForConditionalGeneration"
  ],
  "ignore_index": -100,
  "image_token_index": 32000,
  "model_type": "llava",
  "pad_token_id": 32001,
  "projector_hidden_act": "gelu",
  "text_config": {
    "_name_or_path": "vicuna-7b-v1.5",
    "architectures": [
      "LlamaForCausalLM"
    ],
    "max_position_embeddings": 4096,
    "model_type": "llama",
    "pad_token_id": 0,
    "rms_norm_eps": 1e-05,
    "torch_dtype": "bfloat16",
    "vocab_size": 32064
  },
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.41.1",
  "vision_config": {
    "hidden_act": "gelu_pytorch_tanh",
    "hidden_size": 1152,
    "image_size": 384,
    "intermediate_size": 4304,
    "layer_norm_eps": 1e-06,
    "model_type": "siglip_vision_model",
    "num_attention_heads": 16,
    "num_hidden_layers": 27,
    "patch_size": 14,
    "torch_dtype": "bfloat16"
  },
  "vision_feature_layer": -2,
  "vision_feature_select_strategy": "default"
}
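
This is the Transformers config for a LLaVA-1.5 variant that replaces the usual CLIP vision tower with SigLIP ("siglip_vision_model", hidden size 1152, 27 layers) on top of a Vicuna-7B-v1.5 language model. Below is a minimal loading and inference sketch; the hub id "giobin/llava_15_siglip" is inferred from this page's path, and the Vicuna-style chat prompt is an assumption, not something the config itself specifies.

# Minimal sketch, assuming the hub id "giobin/llava_15_siglip" (inferred
# from the page path) and a LLaVA-1.5 / Vicuna-v1.5 style prompt template.
import torch
from PIL import Image
from transformers import AutoProcessor, LlavaForConditionalGeneration

model_id = "giobin/llava_15_siglip"  # assumed repo id; adjust if different
model = LlavaForConditionalGeneration.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,  # matches the "torch_dtype" fields above
    device_map="auto",
)
processor = AutoProcessor.from_pretrained(model_id)

# "<image>" maps to image_token_index 32000 in this config.
prompt = "USER: <image>\nDescribe this image. ASSISTANT:"
image = Image.open("example.jpg")

# Move tensors to the model's device; only floating tensors are cast to bf16.
inputs = processor(text=prompt, images=image, return_tensors="pt").to(
    model.device, torch.bfloat16
)
output_ids = model.generate(**inputs, max_new_tokens=64)
print(processor.decode(output_ids[0], skip_special_tokens=True))

With "vision_feature_layer": -2, the multimodal projector consumes hidden states from the penultimate SigLIP layer; at image_size 384 with patch_size 14, the stride-14 patch embedding produces a 27x27 patch grid per image.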