numbmelon committed on
Commit
698a175
1 Parent(s): 0c94b66

Upload config.json with huggingface_hub

Browse files
Files changed (1) hide show
  1. config.json +301 -0
config.json ADDED
@@ -0,0 +1,301 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_commit_hash": null,
3
+ "_name_or_path": "OS-Atlas-Base-4B",
4
+ "architectures": [
5
+ "InternVLChatModel"
6
+ ],
7
+ "auto_map": {
8
+ "AutoConfig": "configuration_internvl_chat.InternVLChatConfig",
9
+ "AutoModel": "modeling_internvl_chat.InternVLChatModel",
10
+ "AutoModelForCausalLM": "modeling_internvl_chat.InternVLChatModel"
11
+ },
12
+ "downsample_ratio": 0.5,
13
+ "dynamic_image_size": true,
14
+ "force_image_size": 448,
15
+ "llm_config": {
16
+ "_name_or_path": "./pretrained/Phi-3-mini-128k-instruct",
17
+ "add_cross_attention": false,
18
+ "architectures": [
19
+ "Phi3ForCausalLM"
20
+ ],
21
+ "attention_dropout": 0.0,
22
+ "auto_map": {
23
+ "AutoConfig": "configuration_phi3.Phi3Config",
24
+ "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"
25
+ },
26
+ "bad_words_ids": null,
27
+ "begin_suppress_tokens": null,
28
+ "bos_token_id": 1,
29
+ "chunk_size_feed_forward": 0,
30
+ "cross_attention_hidden_size": null,
31
+ "decoder_start_token_id": null,
32
+ "diversity_penalty": 0.0,
33
+ "do_sample": false,
34
+ "early_stopping": false,
35
+ "embd_pdrop": 0.0,
36
+ "encoder_no_repeat_ngram_size": 0,
37
+ "eos_token_id": 32000,
38
+ "exponential_decay_length_penalty": null,
39
+ "finetuning_task": null,
40
+ "forced_bos_token_id": null,
41
+ "forced_eos_token_id": null,
42
+ "hidden_act": "silu",
43
+ "hidden_size": 3072,
44
+ "id2label": {
45
+ "0": "LABEL_0",
46
+ "1": "LABEL_1"
47
+ },
48
+ "initializer_range": 0.02,
49
+ "intermediate_size": 8192,
50
+ "is_decoder": false,
51
+ "is_encoder_decoder": false,
52
+ "label2id": {
53
+ "LABEL_0": 0,
54
+ "LABEL_1": 1
55
+ },
56
+ "length_penalty": 1.0,
57
+ "max_length": 20,
58
+ "max_position_embeddings": 131072,
59
+ "min_length": 0,
60
+ "model_type": "phi3",
61
+ "no_repeat_ngram_size": 0,
62
+ "num_attention_heads": 32,
63
+ "num_beam_groups": 1,
64
+ "num_beams": 1,
65
+ "num_hidden_layers": 32,
66
+ "num_key_value_heads": 32,
67
+ "num_return_sequences": 1,
68
+ "original_max_position_embeddings": 4096,
69
+ "output_attentions": false,
70
+ "output_hidden_states": false,
71
+ "output_scores": false,
72
+ "pad_token_id": 32000,
73
+ "prefix": null,
74
+ "problem_type": null,
75
+ "pruned_heads": {},
76
+ "remove_invalid_values": false,
77
+ "repetition_penalty": 1.0,
78
+ "resid_pdrop": 0.0,
79
+ "return_dict": true,
80
+ "return_dict_in_generate": false,
81
+ "rms_norm_eps": 1e-05,
82
+ "rope_scaling": {
83
+ "long_factor": [
84
+ 1.0299999713897705,
85
+ 1.0499999523162842,
86
+ 1.0499999523162842,
87
+ 1.0799999237060547,
88
+ 1.2299998998641968,
89
+ 1.2299998998641968,
90
+ 1.2999999523162842,
91
+ 1.4499999284744263,
92
+ 1.5999999046325684,
93
+ 1.6499998569488525,
94
+ 1.8999998569488525,
95
+ 2.859999895095825,
96
+ 3.68999981880188,
97
+ 5.419999599456787,
98
+ 5.489999771118164,
99
+ 5.489999771118164,
100
+ 9.09000015258789,
101
+ 11.579999923706055,
102
+ 15.65999984741211,
103
+ 15.769999504089355,
104
+ 15.789999961853027,
105
+ 18.360000610351562,
106
+ 21.989999771118164,
107
+ 23.079999923706055,
108
+ 30.009998321533203,
109
+ 32.35000228881836,
110
+ 32.590003967285156,
111
+ 35.56000518798828,
112
+ 39.95000457763672,
113
+ 53.840003967285156,
114
+ 56.20000457763672,
115
+ 57.95000457763672,
116
+ 59.29000473022461,
117
+ 59.77000427246094,
118
+ 59.920005798339844,
119
+ 61.190006256103516,
120
+ 61.96000671386719,
121
+ 62.50000762939453,
122
+ 63.3700065612793,
123
+ 63.48000717163086,
124
+ 63.48000717163086,
125
+ 63.66000747680664,
126
+ 63.850006103515625,
127
+ 64.08000946044922,
128
+ 64.760009765625,
129
+ 64.80001068115234,
130
+ 64.81001281738281,
131
+ 64.81001281738281
132
+ ],
133
+ "short_factor": [
134
+ 1.05,
135
+ 1.05,
136
+ 1.05,
137
+ 1.1,
138
+ 1.1,
139
+ 1.1500000000000001,
140
+ 1.2000000000000002,
141
+ 1.2500000000000002,
142
+ 1.3000000000000003,
143
+ 1.3500000000000003,
144
+ 1.5000000000000004,
145
+ 2.000000000000001,
146
+ 2.000000000000001,
147
+ 2.000000000000001,
148
+ 2.000000000000001,
149
+ 2.000000000000001,
150
+ 2.000000000000001,
151
+ 2.000000000000001,
152
+ 2.000000000000001,
153
+ 2.000000000000001,
154
+ 2.000000000000001,
155
+ 2.000000000000001,
156
+ 2.000000000000001,
157
+ 2.000000000000001,
158
+ 2.000000000000001,
159
+ 2.000000000000001,
160
+ 2.000000000000001,
161
+ 2.000000000000001,
162
+ 2.000000000000001,
163
+ 2.000000000000001,
164
+ 2.000000000000001,
165
+ 2.000000000000001,
166
+ 2.0500000000000007,
167
+ 2.0500000000000007,
168
+ 2.0500000000000007,
169
+ 2.1000000000000005,
170
+ 2.1000000000000005,
171
+ 2.1000000000000005,
172
+ 2.1500000000000004,
173
+ 2.1500000000000004,
174
+ 2.3499999999999996,
175
+ 2.549999999999999,
176
+ 2.5999999999999988,
177
+ 2.5999999999999988,
178
+ 2.7499999999999982,
179
+ 2.849999999999998,
180
+ 2.849999999999998,
181
+ 2.9499999999999975
182
+ ],
183
+ "type": "su"
184
+ },
185
+ "rope_theta": 10000.0,
186
+ "sep_token_id": null,
187
+ "sliding_window": 262144,
188
+ "suppress_tokens": null,
189
+ "task_specific_params": null,
190
+ "temperature": 1.0,
191
+ "tf_legacy_loss": false,
192
+ "tie_encoder_decoder": false,
193
+ "tie_word_embeddings": false,
194
+ "tokenizer_class": null,
195
+ "top_k": 50,
196
+ "top_p": null,
197
+ "torch_dtype": "bfloat16",
198
+ "torchscript": false,
199
+ "transformers_version": "4.41.2",
200
+ "typical_p": 1.0,
201
+ "use_bfloat16": true,
202
+ "use_cache": false,
203
+ "vocab_size": 32022
204
+ },
205
+ "max_dynamic_patch": 6,
206
+ "min_dynamic_patch": 1,
207
+ "model_type": "internvl_chat",
208
+ "pad2square": false,
209
+ "ps_version": "v2",
210
+ "select_layer": -1,
211
+ "template": "phi3-chat",
212
+ "torch_dtype": "bfloat16",
213
+ "transformers_version": null,
214
+ "use_backbone_lora": 0,
215
+ "use_llm_lora": 0,
216
+ "use_thumbnail": true,
217
+ "vision_config": {
218
+ "_name_or_path": "",
219
+ "add_cross_attention": false,
220
+ "architectures": [
221
+ "InternVisionModel"
222
+ ],
223
+ "attention_dropout": 0.0,
224
+ "bad_words_ids": null,
225
+ "begin_suppress_tokens": null,
226
+ "bos_token_id": null,
227
+ "chunk_size_feed_forward": 0,
228
+ "cross_attention_hidden_size": null,
229
+ "decoder_start_token_id": null,
230
+ "diversity_penalty": 0.0,
231
+ "do_sample": false,
232
+ "drop_path_rate": 0.1,
233
+ "dropout": 0.0,
234
+ "early_stopping": false,
235
+ "encoder_no_repeat_ngram_size": 0,
236
+ "eos_token_id": null,
237
+ "exponential_decay_length_penalty": null,
238
+ "finetuning_task": null,
239
+ "forced_bos_token_id": null,
240
+ "forced_eos_token_id": null,
241
+ "hidden_act": "gelu",
242
+ "hidden_size": 1024,
243
+ "id2label": {
244
+ "0": "LABEL_0",
245
+ "1": "LABEL_1"
246
+ },
247
+ "image_size": 448,
248
+ "initializer_factor": 1.0,
249
+ "initializer_range": 0.02,
250
+ "intermediate_size": 4096,
251
+ "is_decoder": false,
252
+ "is_encoder_decoder": false,
253
+ "label2id": {
254
+ "LABEL_0": 0,
255
+ "LABEL_1": 1
256
+ },
257
+ "layer_norm_eps": 1e-06,
258
+ "length_penalty": 1.0,
259
+ "max_length": 20,
260
+ "min_length": 0,
261
+ "model_type": "intern_vit_6b",
262
+ "no_repeat_ngram_size": 0,
263
+ "norm_type": "layer_norm",
264
+ "num_attention_heads": 16,
265
+ "num_beam_groups": 1,
266
+ "num_beams": 1,
267
+ "num_channels": 3,
268
+ "num_hidden_layers": 24,
269
+ "num_return_sequences": 1,
270
+ "output_attentions": false,
271
+ "output_hidden_states": false,
272
+ "output_scores": false,
273
+ "pad_token_id": null,
274
+ "patch_size": 14,
275
+ "prefix": null,
276
+ "problem_type": null,
277
+ "pruned_heads": {},
278
+ "qk_normalization": false,
279
+ "qkv_bias": true,
280
+ "remove_invalid_values": false,
281
+ "repetition_penalty": 1.0,
282
+ "return_dict": true,
283
+ "return_dict_in_generate": false,
284
+ "sep_token_id": null,
285
+ "suppress_tokens": null,
286
+ "task_specific_params": null,
287
+ "temperature": 1.0,
288
+ "tf_legacy_loss": false,
289
+ "tie_encoder_decoder": false,
290
+ "tie_word_embeddings": true,
291
+ "tokenizer_class": null,
292
+ "top_k": 50,
293
+ "top_p": null,
294
+ "torch_dtype": "bfloat16",
295
+ "torchscript": false,
296
+ "transformers_version": "4.41.2",
297
+ "typical_p": 1.0,
298
+ "use_bfloat16": true,
299
+ "use_flash_attn": true
300
+ }
301
+ }