law-glm-10b / tokenizer_config.json
{
  "add_prefix_space": false,
  "additional_special_tokens": [
    "<|startofpiece|>",
    "<|endofpiece|>",
    "[gMASK]",
    "[sMASK]"
  ],
  "auto_map": {
    "AutoTokenizer": [
      "THUDM/glm-10b-chinese--tokenization_glm.GLMChineseTokenizer",
      null
    ]
  },
  "clean_up_tokenization_spaces": true,
  "cls_token": "[CLS]",
  "eos_token": "<|endoftext|>",
  "mask_token": "[MASK]",
  "model_max_length": 1000000000000000019884624838656,
  "pad_token": "<|endoftext|>",
  "tokenizer_class": "GLMChineseTokenizer",
  "unk_token": "[UNK]",
  "use_fast": false
}
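
For reference, a minimal sketch of loading a tokenizer from a config like this with the transformers library. The auto_map entry resolves the custom GLMChineseTokenizer from THUDM/glm-10b-chinese, so trust_remote_code=True is required; the hub repo id used below is an assumption inferred from the path above. Note that the very large model_max_length is simply the transformers sentinel value int(1e30), meaning no maximum length was set.

from transformers import AutoTokenizer

# Sketch: load the tokenizer described by this config. trust_remote_code=True
# lets transformers import the custom tokenizer class named in auto_map.
tokenizer = AutoTokenizer.from_pretrained(
    "law-llm/law-glm-10b",  # hypothetical hub id; adjust to the actual repo
    trust_remote_code=True,
    use_fast=False,  # the config pins use_fast: false (slow tokenizer only)
)

print(tokenizer.eos_token)                  # <|endoftext|>
print(tokenizer.additional_special_tokens)  # includes [gMASK] and [sMASK]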