syp115 committed on
Commit ad5be98
1 Parent(s): a7ed63c

Update config.json

Files changed (1)
  1. config.json +4 -4
config.json CHANGED
@@ -1,7 +1,7 @@
 {
-  "_name_or_path": "checkpoints/llava-v1.5-7b-558k_sharegpt4v_pretrain-qladder_Q64-SLA_V64L192",
+  "_name_or_path": "lmsys/vicuna-7b-v1.5",
   "architectures": [
-    "LlavaLlamaForCausalLM"
+    "ArcanaLlamaForCausalLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
@@ -22,8 +22,8 @@
   "mm_use_im_start_end": false,
   "mm_vision_select_feature": "patch",
   "mm_vision_select_layer": -2,
-  "mm_vision_tower": "/root/paddlejob/workspace/env_run/_zhanghuaxin/MLLM/LLaVA/checkpoints/llava-v1.5-7b-pretrain-qladder-558k_Q64/clip-qladder-copied-558k-Q64",
-  "model_type": "llava_llama",
+  "mm_vision_tower": "openai/clip-vit-large-patch14-336",
+  "model_type": "arcana_llama",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "num_key_value_heads": 32,