change max_new_tokens
Browse files
README.md
CHANGED
@@ -107,7 +107,7 @@ for data in tqdm(datasets):
     outputs = model.generate(
         tokenized_input,
         attention_mask=attention_mask,
-        max_new_tokens=
+        max_new_tokens=1024,
         do_sample=False,
         repetition_penalty=1.2,
         pad_token_id=tokenizer.eos_token_id