deneme-2 / tokenizer_config.json
{
  "add_bos_token": false,
  "add_prefix_space": false,
  "bos_token": {
    "__type": "AddedToken",
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "__type": "AddedToken",
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "errors": "replace",
  "model_max_length": 1000000000000000019884624838656,
  "name_or_path": "emre/jurisprudence-textgen-gpt-2",
  "pad_token": null,
  "special_tokens_map_file": null,
  "tokenizer_class": "GPT2Tokenizer",
  "tokenizer_file": null,
  "unk_token": {
    "__type": "AddedToken",
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  }
}
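
This is a standard GPT-2 byte-level BPE tokenizer configuration: bos_token, eos_token, and unk_token all map to <|endoftext|>, no pad token is defined, and model_max_length is left at the transformers placeholder value meaning "no limit". A minimal sketch of loading and inspecting it with the transformers library follows; the repo id "eminecg/deneme-2" is assumed from this page and may need adjusting.

# Minimal sketch (assumption: the tokenizer is hosted at eminecg/deneme-2).
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("eminecg/deneme-2")

# These reflect the fields in the config above.
print(tokenizer.bos_token, tokenizer.eos_token, tokenizer.unk_token)  # all "<|endoftext|>"
print(tokenizer.pad_token)  # None, since "pad_token" is null above

# GPT-2 ships without a pad token; a common workaround for batched inputs
# is to reuse the eos token as padding:
tokenizer.pad_token = tokenizer.eos_token

ids = tokenizer("Example sentence.")
print(ids["input_ids"])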