Upload Pix2StructForConditionalGeneration
config.json +3 -6

config.json CHANGED
@@ -7,6 +7,7 @@
   "decoder_start_token_id": 0,
   "initializer_factor": 1.0,
   "initializer_range": 0.02,
+  "is_vqa": false,
   "model_type": "pix2struct",
   "pad_token_id": 0,
   "text_config": {
@@ -82,7 +83,7 @@
     "top_p": 1.0,
     "torch_dtype": null,
     "torchscript": false,
-    "transformers_version": "4.
+    "transformers_version": "4.28.0.dev0",
     "typical_p": 1.0,
     "use_bfloat16": false,
     "use_cache": false,
@@ -114,13 +115,11 @@
     "finetuning_task": null,
     "forced_bos_token_id": null,
     "forced_eos_token_id": null,
-    "hidden_dropout_prob": 0.0,
     "hidden_size": 1536,
     "id2label": {
       "0": "LABEL_0",
       "1": "LABEL_1"
     },
-    "image_size": 384,
     "initializer_factor": 1.0,
     "initializer_range": 0.02,
     "is_decoder": false,
@@ -134,7 +133,6 @@
     "length_penalty": 1.0,
     "max_length": 20,
     "min_length": 0,
-    "mlp_bias": false,
     "model_type": "pix2struct_vision_model",
     "no_repeat_ngram_size": 0,
     "num_attention_heads": 24,
@@ -153,7 +151,6 @@
     "problem_type": null,
     "projection_dim": 768,
     "pruned_heads": {},
-    "qkv_bias": false,
     "relative_attention_max_distance": 128,
     "relative_attention_num_buckets": 32,
     "remove_invalid_values": false,
@@ -173,7 +170,7 @@
     "top_p": 1.0,
     "torch_dtype": null,
     "torchscript": false,
-    "transformers_version": "4.
+    "transformers_version": "4.28.0.dev0",
     "typical_p": 1.0,
     "use_bfloat16": false
   }
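For readers following along, below is a minimal sketch (not part of this commit) of how the updated config.json is consumed by the transformers library. It assumes transformers >= 4.28 (matching the "transformers_version" written here) and that config.json sits in the current working directory; adjust the path to wherever this file lives in your local copy of the model repository.

# Minimal sketch: inspect the committed config and build a model from it.
# Assumptions: transformers >= 4.28 is installed and "config.json" is the
# file from this commit, saved locally.
from transformers import Pix2StructConfig, Pix2StructForConditionalGeneration

config = Pix2StructConfig.from_json_file("config.json")

# "is_vqa" is the flag added at the top level of this config; it marks
# whether the checkpoint expects VQA-style prompts rendered onto the image.
print(config.is_vqa)                      # False for this checkpoint
print(config.vision_config.hidden_size)   # 1536, untouched by this commit

# Builds a randomly initialized model from the config alone; use
# Pix2StructForConditionalGeneration.from_pretrained(<repo_id>) to load
# the uploaded weights instead.
model = Pix2StructForConditionalGeneration(config)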