Fix number of tokens
Browse files
README.md
CHANGED
@@ -87,7 +87,7 @@ for data in tqdm(datasets):
 87          outputs = model.generate(
 88              tokenized_input,
 89              attention_mask=attention_mask,
 90 -            max_new_tokens=
 90 +            max_new_tokens=100,
 91              do_sample=False,
 92              repetition_penalty=1.2,
 93              pad_token_id=tokenizer.eos_token_id