personaGPT / tokenizer_config.json
{"errors": "replace", "unk_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "bos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "eos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "add_prefix_space": false, "pad_token": "<|endoftext|>", "cls_token": "<|cls|>", "sep_token": "<|sep|>", "special_tokens_map_file": null, "full_tokenizer_file": null, "tokenizer_file": "/home/af1tang/convogym/checkpoint/model/tokenizer.json", "name_or_path": "/home/af1tang/convogym/checkpoint/model/", "tokenizer_class": "GPT2Tokenizer"}