{
  "clean_up_tokenization_spaces": true,
  "model_max_length": 64,
  "special_tokens": [
    "<s>",
    "<pad>",
    "</s>",
    "<unk>",
    "<mask>",
    "<true>",
    "<false>"
  ],
  "tokenizer_class": "PreTrainedTokenizerFast"
}
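
For context, a minimal sketch of how a checkpoint that ships this configuration would typically be loaded with Transformers. The checkpoint path below is a placeholder, not a real repo id:

from transformers import AutoTokenizer

# "path/to/checkpoint" is hypothetical; point it at the Hub repo or local
# directory containing this config alongside its tokenizer.json file.
tokenizer = AutoTokenizer.from_pretrained("path/to/checkpoint")

print(tokenizer.model_max_length)  # 64, per "model_max_length" above

# With truncation enabled, inputs are cut to the 64-token limit.
encoded = tokenizer("some input text", truncation=True)

The "tokenizer_class" field is what tells AutoTokenizer to instantiate a PreTrainedTokenizerFast here rather than a model-specific tokenizer class.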