Spaces: Running on Zero
Added a radio box to choose `turna_model_version`
app.py CHANGED
@@ -128,8 +128,9 @@ def categorize(input):
 @spaces.GPU
 def turna(input, max_new_tokens, length_penalty,
           top_k, top_p, temp, num_beams,
-          do_sample, no_repeat_ngram_size, repetition_penalty):
-
+          do_sample, no_repeat_ngram_size, repetition_penalty, turna_model_version):
+
+    turna = pipeline(model=f"boun-tabi-LMG/{turna_model_version}", device=0)
     input = f"[S2S] {input}<EOS>"
 
     return turna(input, max_new_tokens = max_new_tokens, length_penalty=length_penalty,
@@ -317,6 +318,7 @@ with gr.Blocks(theme="abidlabs/Lime") as demo:
             num_beams = gr.Slider(label = "Number of beams", minimum=1,
                                   maximum=10, value=3)
             do_sample = gr.Radio(choices = [True, False], value = True, label = "Sampling")
+            turna_model_version = gr.Radio(choices = ["TURNA", "TURNA-2850K", "TURNA-4350K"], value = "TURNA", label = "Choose TURNA model version")
         with gr.Column():
             text_gen_input = gr.Textbox(label="Text Generation Input")
 
@@ -324,10 +326,10 @@ with gr.Blocks(theme="abidlabs/Lime") as demo:
             text_gen_output = gr.Textbox(label="Text Generation Output")
             text_gen_submit.click(turna, inputs=[text_gen_input, max_new_tokens, length_penalty,
                                                  top_k, top_p, temp, num_beams,
-                                                 do_sample, no_repeat_ngram_size, repetition_penalty], outputs=text_gen_output)
+                                                 do_sample, no_repeat_ngram_size, repetition_penalty, turna_model_version], outputs=text_gen_output)
             text_gen_example = [["Bir varmış, bir yokmuş, evvel zaman içinde, kalbur saman içinde, uzak diyarların birinde bir turna"]]
             text_gen_examples = gr.Examples(examples = text_gen_example, inputs = [text_gen_input, max_new_tokens, length_penalty,
-                                            top_k, top_p, temp, num_beams, do_sample, no_repeat_ngram_size, repetition_penalty], outputs=text_gen_output, fn=turna)
+                                            top_k, top_p, temp, num_beams, do_sample, no_repeat_ngram_size, repetition_penalty, turna_model_version], outputs=text_gen_output, fn=turna)
 
     gr.Markdown(CITATION)
 
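In short, the commit threads the new `gr.Radio` value through the `click()` inputs (and the `Examples` inputs) into the handler, where it selects which checkpoint `pipeline()` loads; since the local `turna = pipeline(...)` shadows the function name, the existing `return turna(input, ...)` line then calls the freshly created pipeline. The sketch below shows the same wiring pattern in a self-contained form; the `text2text-generation` task string, the `max_new_tokens` default, and the single-textbox layout are assumptions for illustration, not the Space's exact code.

```python
# Minimal sketch of the pattern this commit adds: a gr.Radio whose value is
# forwarded as an extra argument to the handler, which loads the chosen
# checkpoint before generating. Names and defaults are illustrative.
import gradio as gr
from transformers import pipeline

def generate(text, turna_model_version):
    # Re-creating the pipeline on every call (as the diff does) is simple but
    # reloads weights each time; task string is an assumption here.
    generator = pipeline(
        "text2text-generation",
        model=f"boun-tabi-LMG/{turna_model_version}",
    )
    out = generator(f"[S2S] {text}<EOS>", max_new_tokens=64)
    return out[0]["generated_text"]

with gr.Blocks() as demo:
    version = gr.Radio(
        choices=["TURNA", "TURNA-2850K", "TURNA-4350K"],
        value="TURNA",
        label="Choose TURNA model version",
    )
    inp = gr.Textbox(label="Text Generation Input")
    btn = gr.Button("Generate")
    out = gr.Textbox(label="Text Generation Output")
    # The Radio component is simply appended to the inputs list, exactly as
    # the commit appends turna_model_version to the click() inputs.
    btn.click(generate, inputs=[inp, version], outputs=out)

if __name__ == "__main__":
    demo.launch()
```

Re-instantiating the pipeline inside the handler keeps the change small, but it reloads the model on every request; caching one pipeline per version (for example in a dict keyed by `turna_model_version`) would avoid that at the cost of extra memory.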