Upload config

config.json  CHANGED  (+2 -2)
@@ -43,8 +43,8 @@
   "mlp_ratio": 4.0,
   "model_type": "fan",
   "norm_layer": null,
+  "num_attention_heads": 8,
   "num_classes": 1000,
-  "num_heads": 8,
   "out_index": -1,
   "patch_size": 16,
   "qkv_bias": true,
@@ -68,7 +68,7 @@
     1
   ],
   "tokens_norm": true,
-  "transformers_version": "4.
+  "transformers_version": "4.24.0.dev0",
   "use_checkpoint": false,
   "use_pos_embed": true
 }
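For reference, a minimal sketch of how the renamed field would be read back from the updated config.json. The file path and the plain-JSON loading are assumptions for illustration, not part of the commit; the commit itself only renames "num_heads" to the more standard "num_attention_heads" key and records the transformers version.

import json

# Minimal sketch: load the updated config.json (assumed to be in the
# current directory) and read the renamed attention-head field.
with open("config.json") as f:
    config = json.load(f)

# After this change the head count lives under "num_attention_heads"
# rather than the old "num_heads" key.
print(config["num_attention_heads"])   # 8
print(config["model_type"])            # "fan"
print(config["transformers_version"])  # "4.24.0.dev0"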