configs
- config.json +3 -0
- tokenizer_config.json +4 -1
config.json
@@ -2,6 +2,9 @@
   "architectures": [
     "DebertaV2ForTokenClassification"
   ],
+  "auto_map": {
+    "AutoTokenizer": "char_tokenizer.CharTokenizer"
+  },
   "attention_probs_dropout_prob": 0.1,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
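The added "auto_map" entry points the transformers Auto classes at code shipped inside the repo: with trust_remote_code=True, AutoTokenizer imports char_tokenizer.py from the repo and instantiates its CharTokenizer class instead of a built-in tokenizer. A minimal loading sketch (the repo id below is a placeholder, not the actual model id):

    from transformers import AutoTokenizer

    # "user/repo" is a placeholder -- substitute the actual model repo id.
    tokenizer = AutoTokenizer.from_pretrained(
        "user/repo",
        trust_remote_code=True,  # required so char_tokenizer.py from the repo is executed
    )
    ids = tokenizer("hello")["input_ids"]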
tokenizer_config.json
@@ -3,5 +3,8 @@
   "eos_token": "[EOS]",
   "pad_token": "[PAD]",
   "tokenizer_class": "CharTokenizer",
-  "unk_token": "[UNK]"
+  "unk_token": "[UNK]",
+  "auto_map": {
+    "AutoTokenizer": "char_tokenizer.CharTokenizer"
+  }
 }
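The auto_map value "char_tokenizer.CharTokenizer" means the repo is expected to contain a char_tokenizer.py module defining a CharTokenizer class. The sketch below shows what a minimal character-level tokenizer of that shape could look like; it is hypothetical and only illustrates the interface, the repo's actual implementation may differ:

    # char_tokenizer.py -- hypothetical sketch of the class referenced by auto_map.
    from transformers import PreTrainedTokenizer


    class CharTokenizer(PreTrainedTokenizer):
        """Character-level tokenizer: every character is its own token."""

        def __init__(self, vocab=None, unk_token="[UNK]", pad_token="[PAD]",
                     eos_token="[EOS]", **kwargs):
            # Assumed vocab: special tokens followed by printable ASCII characters.
            if vocab is None:
                chars = [chr(i) for i in range(32, 127)]
                vocab = {tok: i for i, tok in
                         enumerate([pad_token, unk_token, eos_token] + chars)}
            # Vocab must exist before the base __init__ registers special tokens.
            self._vocab = vocab
            self._ids_to_tokens = {i: t for t, i in vocab.items()}
            super().__init__(unk_token=unk_token, pad_token=pad_token,
                             eos_token=eos_token, **kwargs)

        @property
        def vocab_size(self):
            return len(self._vocab)

        def get_vocab(self):
            return dict(self._vocab)

        def _tokenize(self, text):
            # One token per character.
            return list(text)

        def _convert_token_to_id(self, token):
            return self._vocab.get(token, self._vocab[self.unk_token])

        def _convert_id_to_token(self, index):
            return self._ids_to_tokens.get(index, self.unk_token)

        def convert_tokens_to_string(self, tokens):
            return "".join(tokens)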