GPT-Neo-125M/experiments/2023-02-21-b0010c97cb1f06debca911602ea05b6ff85a8270fb9487d27b3d52eb4eb29e9e/final_checkpoint/tokenizer/tokenizer_config.json
{
  "add_bos_token": false,
  "add_prefix_space": false,
  "bos_token": {
    "__type": "AddedToken",
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "__type": "AddedToken",
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "errors": "replace",
  "model_max_length": 2048,
  "name_or_path": "EleutherAI/gpt-neo-125M",
  "pad_token": null,
  "special_tokens_map_file": null,
  "tokenizer_class": "GPT2Tokenizer",
  "unk_token": {
    "__type": "AddedToken",
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  }
}