Upload tokenizer
Changed files:
- special_tokens_map.json: +7 -1
- tokenizer_config.json: +8 -1
special_tokens_map.json

@@ -13,7 +13,13 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token":
+  "pad_token": {
+    "content": "[MASK]",
+    "lstrip": true,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
   "sep_token": {
     "content": "[SEP]",
     "lstrip": false,
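In effect, this change repoints the tokenizer's padding token at the existing [MASK] token (with lstrip enabled) rather than a dedicated pad token. A minimal sketch of what that means when the tokenizer is reloaded with transformers, assuming these files live in a Hub repo ("user/model" below is a placeholder id):

from transformers import AutoTokenizer

# Placeholder repo id; substitute the repository this commit belongs to.
tok = AutoTokenizer.from_pretrained("user/model")

# After this commit, padding reuses the [MASK] token.
print(tok.pad_token)                          # [MASK]
print(tok.pad_token_id == tok.mask_token_id)  # True

# Short sequences in a batch are therefore filled with the [MASK] id.
batch = tok(["short", "a somewhat longer sequence"], padding=True)

One consequence of sharing an id between pad and mask: the attention_mask, not the token id, becomes the only reliable way to tell padding apart from genuine [MASK] tokens.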
tokenizer_config.json

@@ -949,13 +949,20 @@
   "cls_token": "[CLS]",
   "extra_special_tokens": {},
   "mask_token": "[MASK]",
+  "max_length": 179,
   "model_input_names": [
     "input_ids",
     "attention_mask"
   ],
-  "model_max_length":
+  "model_max_length": 179,
+  "pad_to_multiple_of": null,
   "pad_token": "[MASK]",
+  "pad_token_type_id": 0,
+  "padding_side": "right",
   "sep_token": "[SEP]",
+  "stride": 0,
   "tokenizer_class": "PreTrainedTokenizerFast",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
   "unk_token": "[UNK]"
 }
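These additions pin the tokenizer's preprocessing defaults in the config itself: a 179-token cap, right-side padding and truncation, and longest-first truncation across sequence pairs. A sketch of how those defaults behave once the tokenizer is reloaded (same placeholder repo id as above):

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("user/model")  # placeholder id

print(tok.model_max_length)  # 179
print(tok.padding_side)      # right
print(tok.truncation_side)   # right

# With truncation enabled, encodings are capped at model_max_length.
enc = tok("token " * 500, truncation=True)
assert len(enc["input_ids"]) <= 179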