XCLiu committed on
Commit
3a21074
1 Parent(s): cba6946

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +1 -1
config.json CHANGED
@@ -12,7 +12,7 @@
12
  "mlp_ratio": 4.3637,
13
  "norm_type": "layer_norm",
14
  "num_attention_heads": 16,
15
- "num_layers": 40,
16
  "patch_size": 2,
17
  "pooled_projection_dim": 1024,
18
  "sample_size": 128,
 
12
  "mlp_ratio": 4.3637,
13
  "norm_type": "layer_norm",
14
  "num_attention_heads": 16,
15
+ "transformer_num_layers": 40,
16
  "patch_size": 2,
17
  "pooled_projection_dim": 1024,
18
  "sample_size": 128,