Upload tokenizer
- added_tokens.json +1 -2
- special_tokens_map.json +0 -7
- tokenizer.json +0 -0
- tokenizer_config.json +1 -9
added_tokens.json CHANGED
@@ -36,6 +36,5 @@
   " ": 50260,
   " ": 50259,
   " ": 50258,
-  " ": 50257,
-  "[PAD]": 50295
+  " ": 50257
 }
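The net effect of this hunk is that the "[PAD]" entry (id 50295) is dropped from the added-token map while the surrounding whitespace tokens keep their ids. A minimal way to confirm the change after pulling the files is sketched below; it assumes the repository has been downloaded to an illustrative local path ./tokenizer-dir (not part of this commit), that Hugging Face transformers' AutoTokenizer is used, and that the transformers release is recent enough to expose added_tokens_decoder.

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./tokenizer-dir")  # illustrative local path

# added_tokens_decoder maps token id -> AddedToken; after this change the
# [PAD] entry at id 50295 should be gone and no pad token is configured.
print(50295 in tok.added_tokens_decoder)  # expected: False
print(tok.pad_token)                      # expected: None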
special_tokens_map.json CHANGED
@@ -13,13 +13,6 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": {
-    "content": "[PAD]",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
   "unk_token": {
     "content": "<|endoftext|>",
     "lstrip": false,
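With the pad_token block removed from special_tokens_map.json, the loaded tokenizer no longer advertises a padding token, so any code that pads batches has to supply one itself. Reusing the EOS token is a common workaround for GPT/CodeGen-style tokenizers; the snippet below is a sketch under that assumption, again using the illustrative ./tokenizer-dir path rather than anything named in this commit.

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./tokenizer-dir")

# No pad token is defined after this commit, so set one before padded batching.
if tok.pad_token is None:
    tok.pad_token = tok.eos_token  # <|endoftext|>, per tokenizer_config.json

batch = tok(["def add(a, b):", "print('hello')"], padding=True)
print(len(batch["input_ids"][0]) == len(batch["input_ids"][1]))  # expected: True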
tokenizer.json CHANGED
The diff for this file is too large to render. See the raw diff.
tokenizer_config.json CHANGED
@@ -312,21 +312,13 @@
       "rstrip": false,
       "single_word": false,
       "special": false
-    },
-    "50295": {
-      "content": "[PAD]",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
     }
   },
   "bos_token": "<|endoftext|>",
   "clean_up_tokenization_spaces": true,
   "eos_token": "<|endoftext|>",
   "model_max_length": 2048,
-  "pad_token": "[PAD]",
+  "return_token_type_ids": false,
   "tokenizer_class": "CodeGenTokenizer",
   "unk_token": "<|endoftext|>"
 }
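Besides dropping the "[PAD]" entries, the config gains "return_token_type_ids": false, which, as the key name suggests, is meant to keep token_type_ids out of the tokenizer's default output; a single-segment causal LM served by CodeGenTokenizer has no use for them. A quick check under the same assumptions as above (illustrative ./tokenizer-dir path, Hugging Face transformers):

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./tokenizer-dir")

enc = tok("def add(a, b): return a + b")
print(sorted(enc.keys()))        # typically ['attention_mask', 'input_ids']
print("token_type_ids" in enc)   # expected: False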