ksmcg committed
Commit 7fe01f2
Parent(s): 3c772d1

Upload config

Files changed (1): config.json +15 -1
config.json CHANGED
@@ -1,17 +1,29 @@
 {
+  "act_layer": null,
+  "attn_drop_rate": 0.0,
+  "backbone": null,
+  "c_head_num": null,
+  "channel_dims": null,
+  "cls_attn_layers": 2,
   "depth": 12,
+  "drop_path_rate": 0.0,
+  "drop_rate": 0.0,
   "embed_dim": 384,
   "eta": 1.0,
+  "hybrid_patch_size": 2,
   "img_size": [
     224,
     224
   ],
   "in_chans": 3,
   "initializer_range": 1.0,
+  "mlp_ratio": 4.0,
   "model_type": "fully_attentional_networks",
+  "norm_layer": null,
   "num_classes": 1000,
   "num_heads": 8,
   "patch_size": 16,
+  "qkv_bias": true,
   "se_mlp": true,
   "sharpen_attn": false,
   "sr_ratio": [
@@ -29,5 +41,7 @@
     1
   ],
   "tokens_norm": true,
-  "transformers_version": "4.22.0.dev0"
+  "transformers_version": "4.22.0.dev0",
+  "use_checkpoint": false,
+  "use_pos_embed": true
 }
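
For reference, a minimal Python sketch of reading the updated file and spot-checking a few of the keys this commit adds; it assumes the uploaded config.json has been downloaded to the current directory:

    import json

    # Load the uploaded FAN config; the local path is an assumption for illustration.
    with open("config.json") as f:
        cfg = json.load(f)

    # Fields introduced by this commit.
    print(cfg["mlp_ratio"])      # 4.0
    print(cfg["qkv_bias"])       # True
    print(cfg["use_pos_embed"])  # True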