ikuyamada committed
Commit 29efe3b
Parent(s): ef02d5c

add id2label and label2id to config.json

Files changed (1):
  config.json  +18 -19
config.json CHANGED
@@ -3,7 +3,6 @@
     "LukeForEntityClassification"
   ],
   "attention_probs_dropout_prob": 0.1,
-  "bert_model_name": "roberta-large",
   "bos_token_id": 0,
   "entity_emb_size": 256,
   "entity_vocab_size": 500000,
@@ -13,28 +12,28 @@
   "hidden_dropout_prob": 0.1,
   "hidden_size": 1024,
   "id2label": {
-    "0": "LABEL_0",
-    "1": "LABEL_1",
-    "2": "LABEL_2",
-    "3": "LABEL_3",
-    "4": "LABEL_4",
-    "5": "LABEL_5",
-    "6": "LABEL_6",
-    "7": "LABEL_7",
-    "8": "LABEL_8"
+    "0": "entity",
+    "1": "event",
+    "2": "group",
+    "3": "location",
+    "4": "object",
+    "5": "organization",
+    "6": "person",
+    "7": "place",
+    "8": "time"
   },
   "initializer_range": 0.02,
   "intermediate_size": 4096,
   "label2id": {
-    "LABEL_0": 0,
-    "LABEL_1": 1,
-    "LABEL_2": 2,
-    "LABEL_3": 3,
-    "LABEL_4": 4,
-    "LABEL_5": 5,
-    "LABEL_6": 6,
-    "LABEL_7": 7,
-    "LABEL_8": 8
+    "entity": 0,
+    "event": 1,
+    "group": 2,
+    "location": 3,
+    "object": 4,
+    "organization": 5,
+    "person": 6,
+    "place": 7,
+    "time": 8
   },
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 514,