LongVA-7B / config.json
{
"_name_or_path": "LongVa/Qwen2-7B-Instruct-extend-step_1000",
"architectures": [
"LlavaQwenForCausalLM"
],
"attention_dropout": 0.0,
"bos_token_id": 151643,
"eos_token_id": 151645,
"hidden_act": "silu",
"hidden_size": 3584,
"image_aspect_ratio": "anyres",
"image_crop_resolution": null,
"image_grid_pinpoints": [
[
336,
672
],
[
336,
1008
],
[
336,
1344
],
[
336,
1680
],
[
336,
2016
],
[
336,
2352
],
[
336,
2688
],
[
336,
3024
],
[
336,
3360
],
[
336,
3696
],
[
336,
4032
],
[
336,
4368
],
[
336,
4704
],
[
336,
5040
],
[
336,
5376
],
[
336,
5712
],
[
336,
6048
],
[
336,
6384
],
[
336,
6720
],
[
336,
7056
],
[
336,
7392
],
[
336,
7728
],
[
336,
8064
],
[
336,
8400
],
[
336,
8736
],
[
336,
9072
],
[
336,
9408
],
[
336,
9744
],
[
336,
10080
],
[
336,
10416
],
[
336,
10752
],
[
336,
11088
],
[
336,
11424
],
[
336,
11760
],
[
336,
12096
],
[
336,
12432
],
[
336,
12768
],
[
336,
13104
],
[
336,
13440
],
[
336,
13776
],
[
336,
14112
],
[
336,
14448
],
[
336,
14784
],
[
336,
15120
],
[
336,
15456
],
[
336,
15792
],
[
336,
16128
],
[
336,
16464
],
[
672,
336
],
[
672,
672
],
[
672,
1008
],
[
672,
1344
],
[
672,
1680
],
[
672,
2016
],
[
672,
2352
],
[
672,
2688
],
[
672,
3024
],
[
672,
3360
],
[
672,
3696
],
[
672,
4032
],
[
672,
4368
],
[
672,
4704
],
[
672,
5040
],
[
672,
5376
],
[
672,
5712
],
[
672,
6048
],
[
672,
6384
],
[
672,
6720
],
[
672,
7056
],
[
672,
7392
],
[
672,
7728
],
[
672,
8064
],
[
1008,
336
],
[
1008,
672
],
[
1008,
1008
],
[
1008,
1344
],
[
1008,
1680
],
[
1008,
2016
],
[
1008,
2352
],
[
1008,
2688
],
[
1008,
3024
],
[
1008,
3360
],
[
1008,
3696
],
[
1008,
4032
],
[
1008,
4368
],
[
1008,
4704
],
[
1008,
5040
],
[
1008,
5376
],
[
1344,
336
],
[
1344,
672
],
[
1344,
1008
],
[
1344,
1344
],
[
1344,
1680
],
[
1344,
2016
],
[
1344,
2352
],
[
1344,
2688
],
[
1344,
3024
],
[
1344,
3360
],
[
1344,
3696
],
[
1344,
4032
],
[
1680,
336
],
[
1680,
672
],
[
1680,
1008
],
[
1680,
1344
],
[
1680,
1680
],
[
1680,
2016
],
[
1680,
2352
],
[
1680,
2688
],
[
1680,
3024
],
[
2016,
336
],
[
2016,
672
],
[
2016,
1008
],
[
2016,
1344
],
[
2016,
1680
],
[
2016,
2016
],
[
2016,
2352
],
[
2016,
2688
],
[
2352,
336
],
[
2352,
672
],
[
2352,
1008
],
[
2352,
1344
],
[
2352,
1680
],
[
2352,
2016
],
[
2352,
2352
],
[
2688,
336
],
[
2688,
672
],
[
2688,
1008
],
[
2688,
1344
],
[
2688,
1680
],
[
2688,
2016
],
[
3024,
336
],
[
3024,
672
],
[
3024,
1008
],
[
3024,
1344
],
[
3024,
1680
],
[
3360,
336
],
[
3360,
672
],
[
3360,
1008
],
[
3360,
1344
],
[
3696,
336
],
[
3696,
672
],
[
3696,
1008
],
[
3696,
1344
],
[
4032,
336
],
[
4032,
672
],
[
4032,
1008
],
[
4032,
1344
],
[
4368,
336
],
[
4368,
672
],
[
4368,
1008
],
[
4704,
336
],
[
4704,
672
],
[
4704,
1008
],
[
5040,
336
],
[
5040,
672
],
[
5040,
1008
],
[
5376,
336
],
[
5376,
672
],
[
5376,
1008
],
[
5712,
336
],
[
5712,
672
],
[
6048,
336
],
[
6048,
672
],
[
6384,
336
],
[
6384,
672
],
[
6720,
336
],
[
6720,
672
],
[
7056,
336
],
[
7056,
672
],
[
7392,
336
],
[
7392,
672
],
[
7728,
336
],
[
7728,
672
],
[
8064,
336
],
[
8064,
672
],
[
8400,
336
],
[
8736,
336
],
[
9072,
336
],
[
9408,
336
],
[
9744,
336
],
[
10080,
336
],
[
10416,
336
],
[
10752,
336
],
[
11088,
336
],
[
11424,
336
],
[
11760,
336
],
[
12096,
336
],
[
12432,
336
],
[
12768,
336
],
[
13104,
336
],
[
13440,
336
],
[
13776,
336
],
[
14112,
336
],
[
14448,
336
],
[
14784,
336
],
[
15120,
336
],
[
15456,
336
],
[
15792,
336
],
[
16128,
336
],
[
16464,
336
]
],
"image_split_resolution": null,
"initializer_range": 0.02,
"intermediate_size": 18944,
"max_position_embeddings": 32768,
"max_window_layers": 28,
"mm_hidden_size": 1024,
"mm_patch_merge_type": "long_video_avgpool2x2",
"mm_projector_lr": null,
"mm_projector_type": "mlp2x_gelu",
"mm_resampler_type": null,
"mm_tunable_parts": "mm_vision_tower,mm_mlp_adapter,mm_language_model",
"mm_use_im_patch_token": false,
"mm_use_im_start_end": false,
"mm_vision_select_feature": "patch",
"mm_vision_select_layer": -2,
"mm_vision_tower": "openai/clip-vit-large-patch14-336",
"mm_vision_tower_lr": 2e-06,
"model_type": "qwen2",
"num_attention_heads": 28,
"num_hidden_layers": 28,
"num_key_value_heads": 4,
"pos_skipping_range": 4096,
"rms_norm_eps": 1e-06,
"rope_scaling": null,
"rope_theta": 1000000000.0,
"sliding_window": 131072,
"tie_word_embeddings": false,
"tokenizer_model_max_length": 32768,
"tokenizer_padding_side": "right",
"torch_dtype": "bfloat16",
"transformers_version": "4.40.0.dev0",
"use_cache": true,
"use_mm_proj": true,
"use_pos_skipping": false,
"use_sliding_window": false,
"vision_tower_pretrained": null,
"vocab_size": 152064
}
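
A few notes on the denser parts of this config.

"image_aspect_ratio": "anyres" together with "image_grid_pinpoints" controls how high-resolution inputs are tiled for the 336-px vision tower ("mm_vision_tower": "openai/clip-vit-large-patch14-336"). The 200 pinpoints above appear to enumerate every pair of multiples of 336 whose tile count lies between 2 and 49, in row-major order. A minimal sketch that regenerates the list under that inferred rule (the rule is read off the values themselves, not taken from the LongVA source):

import json

TILE = 336       # input resolution of openai/clip-vit-large-patch14-336
MAX_TILES = 49   # largest tile count in the list, e.g. [336, 16464] = 1 x 49 tiles

pinpoints = [
    [a * TILE, b * TILE]
    for a in range(1, MAX_TILES + 1)
    for b in range(1, MAX_TILES + 1)
    if 2 <= a * b <= MAX_TILES
]

with open("config.json") as f:
    config = json.load(f)

assert pinpoints == config["image_grid_pinpoints"]  # all 200 pairs match

In other words, the model advertises every padded canvas of 2 to 49 CLIP tiles, covering aspect ratios from 1:49 through 49:1.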
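
At preprocessing time, LLaVA-family "anyres" pipelines pick one pinpoint per image by maximizing the usable (effective) resolution after fitting the image into the candidate canvas, breaking ties by minimizing padding waste. The sketch below follows that standard selection logic; it is an illustration of the technique, not necessarily LongVA's exact code, and the helper name select_best_resolution is borrowed from LLaVA-style repos:

def select_best_resolution(original_size, possible_resolutions):
    """Pick the canvas that preserves the most image area with the least padding."""
    ow, oh = original_size
    best_fit, best_effective, least_wasted = None, 0, float("inf")
    for w, h in possible_resolutions:
        scale = min(w / ow, h / oh)              # fit the image inside the canvas
        effective = min(int(ow * scale) * int(oh * scale), ow * oh)
        wasted = w * h - effective               # padding area left on the canvas
        if effective > best_effective or (
            effective == best_effective and wasted < least_wasted
        ):
            best_fit, best_effective, least_wasted = (w, h), effective, wasted
    return best_fit

candidates = [
    [a * 336, b * 336] for a in range(1, 50) for b in range(1, 50) if 2 <= a * b <= 49
]
print(select_best_resolution((1280, 720), candidates))  # (1344, 1008): a 4 x 3 tile canvas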
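
"mm_patch_merge_type": "long_video_avgpool2x2" is what keeps the per-frame token budget small: with "mm_vision_select_feature": "patch", clip-vit-large-patch14-336 emits a 24 x 24 grid of patch features (576 tokens of width "mm_hidden_size": 1024) per frame, and a 2x2 average pool would reduce that to 144 tokens before the mlp2x_gelu projector. A shape-level sketch of that pooling step, assuming this reading of the merge type (not LongVA's actual code):

import torch
import torch.nn.functional as F

frames, grid, dim = 8, 24, 1024                 # 8 hypothetical frames, 24 x 24 CLIP patch grid
feats = torch.randn(frames, grid * grid, dim)   # (8, 576, 1024) per-frame patch tokens

x = feats.transpose(1, 2).reshape(frames, dim, grid, grid)  # (8, 1024, 24, 24)
x = F.avg_pool2d(x, kernel_size=2)                          # (8, 1024, 12, 12)
pooled = x.flatten(2).transpose(1, 2)                       # (8, 144, 1024)
print(pooled.shape)  # torch.Size([8, 144, 1024]) -> 144 tokens per frame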