{
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "0": {
      "content": "<s>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<pad>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "</s>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "3": {
      "content": "<unk>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "50264": {
      "content": "<mask>",
      "lstrip": true,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "bos_token": "<s>",
  "clean_up_tokenization_spaces": true,
  "cls_token": "<s>",
  "eos_token": "</s>",
  "errors": "replace",
  "mask_token": "<mask>",
  "model_max_length": 128,
  "pad_token": "<pad>",
  "return_tensors": "pt",
  "sep_token": "</s>",
  "tokenizer_class": "BartTokenizer",
  "trim_offsets": true,
  "unk_token": "<unk>"
}