bghira committed on
Commit
5ce3dd0
·
verified ·
1 Parent(s): d36d591

Update transformer/config.json

Browse files
Files changed (1) hide show
  1. transformer/config.json +1 -0
transformer/config.json CHANGED
@@ -8,6 +8,7 @@
8
  "cross_attention_head_dim": 112,
9
  "dropout": 0.0,
10
  "in_channels": 32,
 
11
  "mlp_ratio": 2.5,
12
  "norm_elementwise_affine": false,
13
  "norm_eps": 1e-06,
 
8
  "cross_attention_head_dim": 112,
9
  "dropout": 0.0,
10
  "in_channels": 32,
11
+ "interpolation_scale": null,
12
  "mlp_ratio": 2.5,
13
  "norm_elementwise_affine": false,
14
  "norm_eps": 1e-06,