michaelfeil committed
Commit a985057
1 Parent(s): e9cd469

Update 0_Transformer/config.json

Files changed (1):
  0_Transformer/config.json +0 -6
0_Transformer/config.json CHANGED
@@ -8,14 +8,8 @@
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 384,
-  "id2label": {
-    "0": "LABEL_0"
-  },
   "initializer_range": 0.02,
   "intermediate_size": 1536,
-  "label2id": {
-    "LABEL_0": 0
-  },
   "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
   "model_type": "bert",
 