nekozutranslate_small / special_tokens_map.json
{
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>"
  ],
  "bos_token": {
    "content": "<|im_start|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|im_end|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|PAD_TOKEN|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": "�"
}
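
For reference, a minimal sketch of how these entries surface through the transformers tokenizer API once the file is part of a tokenizer repo. The Hub id "farihdzaky/nekozutranslate_small" is an assumption inferred from the file path and uploader name, not confirmed by this file; substitute the actual repo id.

    from transformers import AutoTokenizer

    # Assumed Hub id, inferred from the file path and uploader; adjust as needed.
    tokenizer = AutoTokenizer.from_pretrained("farihdzaky/nekozutranslate_small")

    # These attributes are populated from special_tokens_map.json above.
    print(tokenizer.bos_token)                  # <|im_start|>
    print(tokenizer.eos_token)                  # <|im_end|>
    print(tokenizer.pad_token)                  # <|PAD_TOKEN|>
    print(tokenizer.additional_special_tokens)  # ['<|im_start|>', '<|im_end|>']

The <|im_start|>/<|im_end|> pair follows the ChatML convention, so declaring them as additional special tokens keeps them from being split during tokenization.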