jwieting committed
Commit 3f8b1a6
1 Parent(s): 6bb663d

Update config.json

Files changed (1)
config.json +27 -0
config.json CHANGED
@@ -9,5 +9,32 @@
  "num_hidden_layers": 0,
  "trigram_input": false,
  "type_vocab_size": 1,
+ "attention_probs_dropout_prob": 0.1,
+ "classifier_activation": false,
+ "classifier_dropout": null,
+ "embedding_size": 1024,
+ "hidden_act": "relu",
+ "hidden_dropout_prob": 0.0,
+ "hidden_size": 1024,
+ "initializer_range": 0.02,
+ "intermediate_size": 512,
+ "intra_bottleneck_size": 128,
+ "key_query_shared_bottleneck": true,
+ "layer_norm_eps": 1e-12,
+ "max_position_embeddings": 512,
+ "model_type": "mobilebert",
+ "normalization_type": "no_norm",
+ "num_attention_heads": 4,
+ "num_feedforward_networks": 4,
+ "num_hidden_layers": 0,
+ "pad_token_id": 0,
+ "tokenizer_class": "ReformerTokenizerFast",
+ "torch_dtype": "float32",
+ "transformers_version": "4.24.0",
+ "trigram_input": false,
+ "true_hidden_size": 128,
+ "type_vocab_size": 1,
+ "use_bottleneck": true,
+ "use_bottleneck_attention": false,
  "vocab_size": 82983
  }
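
The updated file is a standard MobileBERT configuration, so it can be loaded and inspected with the Hugging Face transformers library. Below is a minimal sketch, assuming it is run from a local checkout of this repository so that config.json resolves to the committed file:

```python
from transformers import MobileBertConfig

# Parse the committed config.json directly. The relative path assumes
# the script runs from the repository root.
config = MobileBertConfig.from_json_file("config.json")

# A few of the fields added by this commit:
print(config.model_type)             # mobilebert
print(config.embedding_size)         # 1024
print(config.intra_bottleneck_size)  # 128
print(config.use_bottleneck)         # True

# num_hidden_layers stays 0, i.e. the config describes a model with
# no transformer layers (an embedding-only encoder).
print(config.num_hidden_layers)      # 0
```

Note that the hunk leaves "num_hidden_layers", "trigram_input", and "type_vocab_size" duplicated in the file. Since the repeated values are identical, and typical JSON parsers (including Python's json module) keep the last occurrence of a repeated key, the parsed configuration is unaffected.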