Upload config.json
config.json  CHANGED  (+9 -9)
@@ -17,8 +17,16 @@
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
+  "id2label": {
+    "0": "non-hateful",
+    "1": "hateful"
+  },
   "initializer_range": 0.02,
   "intermediate_size": 3072,
+  "label2id": {
+    "hateful": 1,
+    "non-hateful": 0
+  },
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 514,
   "model_type": "xlm-roberta",
@@ -32,13 +40,5 @@
   "transformers_version": "4.17.0",
   "type_vocab_size": 1,
   "use_cache": true,
-  "vocab_size": 250002
-  "label2id": {
-    "non-hateful": 0,
-    "hateful": 1
-  },
-  "id2label": {
-    "0": "non-hateful",
-    "1": "hateful"
-  }
+  "vocab_size": 250002
 }
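For context: the commit moves "id2label" and "label2id" into their alphabetically sorted positions and places "vocab_size" last with the commas corrected; in the removed lines, "vocab_size": 250002 was followed directly by "label2id" with no separating comma, so the previous config.json was not valid JSON. These mappings are also what let transformers return human-readable class names. A minimal sketch of how they are used, assuming a hypothetical repo id (substitute the actual model path):

# A minimal sketch, assuming a hypothetical repo id; replace with the real model path.
from transformers import AutoModelForSequenceClassification, AutoTokenizer, pipeline

model_id = "your-org/xlm-roberta-hateful"  # hypothetical placeholder, not the actual repo

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForSequenceClassification.from_pretrained(model_id)

# Because config.json defines id2label, the pipeline reports
# "hateful" / "non-hateful" instead of the generic "LABEL_0" / "LABEL_1".
classifier = pipeline("text-classification", model=model, tokenizer=tokenizer)
print(classifier("an example sentence to classify"))
# Output has the shape [{'label': 'non-hateful', 'score': ...}] (illustrative only).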