vneralla committed
Commit 1c23bbf
Parent: 14b9824

Remove special characters

Files changed (2)
  1. tokenizer.json +1 -1
  2. vocab.json +1 -1
tokenizer.json CHANGED
@@ -1 +1 @@
- PreTrainedTokenizer(name_or_path='', vocab_size=47, model_max_len=1000000000000000019884624838656, is_fast=False, padding_side='right', special_tokens={'bos_token': '<s>', 'eos_token': '</s>', 'unk_token': '[UNK]', 'pad_token': '[PAD]'})
+ PreTrainedTokenizer(name_or_path='', vocab_size=38, model_max_len=1000000000000000019884624838656, is_fast=False, padding_side='right', truncation_side='right', special_tokens={'bos_token': '<s>', 'eos_token': '</s>', 'unk_token': '[UNK]', 'pad_token': '[PAD]'})
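The updated tokenizer now reports vocab_size=38 (down from 47) and records truncation_side='right'. A minimal sketch of how the new size could be verified after this commit; it assumes this repository hosts a Wav2Vec2-style CTC tokenizer backed by the vocab.json below, and the repo id is a placeholder:

```python
# Sketch only: assumes a Wav2Vec2-style CTC tokenizer; "username/model" is a
# placeholder repo id, not the actual repository name.
from transformers import Wav2Vec2CTCTokenizer

tokenizer = Wav2Vec2CTCTokenizer.from_pretrained("username/model")

# After this commit the character vocabulary has 38 entries (it had 47).
print(tokenizer.vocab_size)          # expected: 38
print(tokenizer.special_tokens_map)  # bos '<s>', eos '</s>', unk '[UNK]', pad '[PAD]'
```

If this tokenizer feeds a CTC acoustic model whose output head was sized for the old 47-token vocabulary, that head would need to be re-initialised or the model re-trained to match the new 38-token vocabulary.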
vocab.json CHANGED
@@ -1 +1 @@
- {"&": 1, ")": 2, "+": 3, "/": 4, "=": 5, "[": 6, "]": 7, "_": 8, "a": 9, "b": 10, "c": 11, "d": 12, "e": 13, "f": 14, "g": 15, "h": 16, "i": 17, "j": 18, "k": 19, "l": 20, "m": 21, "n": 22, "o": 23, "p": 24, "q": 25, "r": 26, "s": 27, "t": 28, "u": 29, "v": 30, "w": 31, "x": 32, "y": 33, "z": 34, "\u00e0": 35, "\u00e1": 36, "\u00e4": 37, "\u00e5": 38, "\u00f1": 39, "\u00f3": 40, "\u00f6": 41, "\u00fa": 42, "\u00fc": 43, "\u0161": 44, "|": 0, "[UNK]": 45, "[PAD]": 46}
+ {"a": 1, "b": 2, "c": 3, "d": 4, "e": 5, "f": 6, "g": 7, "h": 8, "i": 9, "j": 10, "k": 11, "l": 12, "m": 13, "n": 14, "o": 15, "p": 16, "q": 17, "r": 18, "s": 19, "t": 20, "u": 21, "v": 22, "w": 23, "x": 24, "y": 25, "z": 26, "\u00e0": 27, "\u00e1": 28, "\u00e4": 29, "\u00e5": 30, "\u00f1": 31, "\u00f6": 32, "\u00fa": 33, "\u00fc": 34, "\u0161": 35, "|": 0, "[UNK]": 36, "[PAD]": 37}