Update app.py
app.py
CHANGED
@@ -8,6 +8,22 @@ def generate_text(prompt, length=50, temperature=0.7, seed=42):
     output = generator(prompt, max_length=length, do_sample=True, temperature=temperature)
     return output[0]['generated_text']
 
+import gradio as gr
+from transformers import pipeline, set_seed
+import logging
+
+logging.basicConfig(level=logging.INFO)
+
+def generate_text(prompt, length=50, temperature=0.7, seed=42):
+    try:
+        set_seed(seed)
+        generator = pipeline('text-generation', model='flax-community/miniLM-L6-h384-uncased', device=0)
+        output = generator(prompt, max_length=length, do_sample=True, temperature=temperature)
+        return output[0]['generated_text']
+    except Exception as e:
+        logging.error(f"Error generating text: {e}")
+        return "Error generating text. Please try again later."
+
 inputs = gr.inputs.Textbox(lines=5, label="Prompt")
 outputs = gr.outputs.Textbox(label="Output Text")
 temperature_slider = gr.inputs.Slider(minimum=0.1, maximum=1.5, default=0.7, label="Temperature")
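The hunk defines a prompt textbox, an output textbox, and a temperature slider, but the interface wiring itself is not part of this commit. The snippet below is a rough sketch, under the assumption that the remainder of app.py connects these components to generate_text through gr.Interface; the length slider, the title string, and the launch guard are illustrative additions, not code from this diff.

import gradio as gr

# Hypothetical wiring, not shown in this diff: pass the components defined
# above to gr.Interface. Inputs are mapped to generate_text positionally,
# so an illustrative length slider is added here to keep the order aligned
# with generate_text(prompt, length, temperature, ...); seed keeps its default of 42.
length_slider = gr.inputs.Slider(minimum=10, maximum=200, default=50, label="Max length")

demo = gr.Interface(
    fn=generate_text,
    inputs=[inputs, length_slider, temperature_slider],
    outputs=outputs,
    title="Text Generation",
)

if __name__ == "__main__":
    demo.launch()

The sketch keeps the legacy gr.inputs / gr.outputs namespaces used in the diff for consistency; newer Gradio releases drop them in favour of using gr.Textbox and gr.Slider directly.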