Update config.json
Browse files — config.json: +2 −2
config.json
CHANGED
@@ -815,7 +815,7 @@
|
|
815 |
"image_split_resolution": null,
|
816 |
"initializer_range": 0.02,
|
817 |
"intermediate_size": 18944,
|
818 |
-
"max_position_embeddings":
|
819 |
"max_window_layers": 28,
|
820 |
"mm_hidden_size": 1024,
|
821 |
"mm_patch_merge_type": "unires",
|
@@ -841,7 +841,7 @@
|
|
841 |
"rope_theta": 1000000000.0,
|
842 |
"sliding_window": 131072,
|
843 |
"tie_word_embeddings": false,
|
844 |
-
"tokenizer_model_max_length":
|
845 |
"tokenizer_padding_side": "right",
|
846 |
"torch_dtype": "bfloat16",
|
847 |
"transformers_version": "4.40.0.dev0",
|
|
|
815 |
"image_split_resolution": null,
|
816 |
"initializer_range": 0.02,
|
817 |
"intermediate_size": 18944,
|
818 |
+
"max_position_embeddings": 224000,
|
819 |
"max_window_layers": 28,
|
820 |
"mm_hidden_size": 1024,
|
821 |
"mm_patch_merge_type": "unires",
|
|
|
841 |
"rope_theta": 1000000000.0,
|
842 |
"sliding_window": 131072,
|
843 |
"tie_word_embeddings": false,
|
844 |
+
"tokenizer_model_max_length": 224000,
|
845 |
"tokenizer_padding_side": "right",
|
846 |
"torch_dtype": "bfloat16",
|
847 |
"transformers_version": "4.40.0.dev0",
|