{ "add_prefix_space": false, "added_tokens_decoder": { "0": { "content": "[CLS]", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "1": { "content": "[SEP]", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "2": { "content": "[BOS]", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "3": { "content": "[MASK]", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "4": { "content": "[PAD]", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "6": { "content": "[UNK]", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true } }, "auto_map": { "AutoTokenizer": [ "LongSafari/hyenadna-large-1m-seqlen-hf--tokenization_hyena.HyenaDNATokenizer", null ] }, "bos_token": "[BOS]", "clean_up_tokenization_spaces": true, "cls_token": "[CLS]", "eos_token": "[SEP]", "mask_token": "[MASK]", "model_max_length": 1000002, "pad_token": "[PAD]", "padding_side": "left", "sep_token": "[SEP]", "tokenizer_class": "HyenaDNATokenizer", "unk_token": "[UNK]" }