{"unk_token": "<|unkoftext|>", "bos_token": "<|beginoftext|>", "eos_token": "<|endoftext|>", "add_prefix_space": false, "errors": "replace", "model_max_length": 2048, "special_tokens_map_file": null, "name_or_path": "/cache/stage1_trained_models_100M/pycodegpt/", "pad_token": "<|padoftext|>", "tokenizer_class": "GPT2Tokenizer"}