GPT-Neo-125M/experiments/2023-02-22-5f8754af240241f4c78ba924918b0bfb26f26a17b0e1799c8a046eb3f6dd6e94/final_checkpoint/tokenizer/tokenizer_config.json
{
  "add_bos_token": false,
  "add_prefix_space": false,
  "bos_token": {
    "__type": "AddedToken",
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "__type": "AddedToken",
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "errors": "replace",
  "model_max_length": 2048,
  "name_or_path": "EleutherAI/gpt-neo-125M",
  "pad_token": null,
  "special_tokens_map_file": null,
  "tokenizer_class": "GPT2Tokenizer",
  "unk_token": {
    "__type": "AddedToken",
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  }
}