michaelfeil committed
Commit 559ec8e
Parent: fabc9f6

setting max model length to something reasonable, e.g. 4096

Files changed (1):
  tokenizer_config.json (+1 -1)
tokenizer_config.json CHANGED
@@ -1746,7 +1746,7 @@
   "bos_token": "<bos>",
   "clean_up_tokenization_spaces": false,
   "eos_token": "<eos>",
-  "model_max_length": 1000000000000000019884624838656,
+  "model_max_length": 4096,
   "pad_token": "<pad>",
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,