{"errors": "replace", "bos_token": "", "eos_token": "", "sep_token": "", "cls_token": "", "unk_token": "", "pad_token": "", "mask_token": "", "add_prefix_space": false, "trim_offsets": true, "special_tokens_map_file": null, "name_or_path": "NbAiLab/nb-roberta-base", "tokenizer_class": "RobertaTokenizer"}