Spaces:
Sleeping
Sleeping
Jalalkhan912
committed on
Commit
•
441ee20
1
Parent(s):
c3b17cb
Update app.py
Browse files
app.py
CHANGED
@@ -35,7 +35,7 @@ What was going on?
|
|
35 |
output = tokenizer.decode(
|
36 |
model.generate(
|
37 |
inputs["input_ids"],
|
38 |
-
max_new_tokens=
|
39 |
)[0],
|
40 |
skip_special_tokens=True
|
41 |
)
|
@@ -77,7 +77,7 @@ def one_shot(example_indices_full,my_example):
|
|
77 |
output = tokenizer.decode(
|
78 |
model.generate(
|
79 |
inputs["input_ids"],
|
80 |
-
max_new_tokens=
|
81 |
)[0],
|
82 |
skip_special_tokens=True
|
83 |
)
|
@@ -89,7 +89,7 @@ def few_shot(example_indices_full_few_shot,my_example):
|
|
89 |
output = tokenizer.decode(
|
90 |
model.generate(
|
91 |
inputs["input_ids"],
|
92 |
-
max_new_tokens=
|
93 |
)[0],
|
94 |
skip_special_tokens=True
|
95 |
)
|
|
|
35 |
output = tokenizer.decode(
|
36 |
model.generate(
|
37 |
inputs["input_ids"],
|
38 |
+
max_new_tokens=50
|
39 |
)[0],
|
40 |
skip_special_tokens=True
|
41 |
)
|
|
|
77 |
output = tokenizer.decode(
|
78 |
model.generate(
|
79 |
inputs["input_ids"],
|
80 |
+
max_new_tokens=50
|
81 |
)[0],
|
82 |
skip_special_tokens=True
|
83 |
)
|
|
|
89 |
output = tokenizer.decode(
|
90 |
model.generate(
|
91 |
inputs["input_ids"],
|
92 |
+
max_new_tokens=50
|
93 |
)[0],
|
94 |
skip_special_tokens=True
|
95 |
)
|