Spaces: Runtime error
import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM
# Load the model and tokenizer once at startup instead of on every request.
model_name = "nomic-ai/gpt4all-13b-snoozy"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

def generate_text(prompt, style):
    # Append the style instruction ("Write the answer in the style of {style}.") to the prompt.
    full_prompt = f"{prompt} Schreibe die Antwort im Stil von {style}."
    inputs = tokenizer.encode(full_prompt, return_tensors="pt")
    outputs = model.generate(inputs, max_length=150, num_return_sequences=1, no_repeat_ngram_size=2)
    # Keep only the newly generated tokens, dropping the echoed prompt.
    generated = outputs[:, inputs.shape[-1]:]
    result = tokenizer.decode(generated[0], skip_special_tokens=True)
    return result
# Styles offered in the dropdown (a formal e-mail, a short story, a poem, a scientific report, a newspaper article).
styles = ["eine formelle E-Mail", "eine Kurzgeschichte", "ein Gedicht", "ein wissenschaftlicher Bericht", "ein Zeitungsartikel"]
css = """ | |
body { | |
background-color: #f0f0f0; | |
color: #333; | |
} | |
.gradio-input, .gradio-output { | |
background-color: #fff; | |
color: #333; | |
} | |
""" | |
# gr.inputs.Dropdown was removed in recent Gradio versions; gr.Dropdown is used directly instead.
iface = gr.Interface(fn=generate_text, inputs=["textbox", gr.Dropdown(choices=styles)], outputs="text", css=css)
iface.launch()
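Once the definitions above are loaded (for example, pasted into a Python session before calling iface.launch()), generate_text can also be exercised without the web UI. The call below is a hypothetical quick check, not part of the original Space, and, like the Space itself, it needs enough memory to hold the 13B-parameter model.

# Hypothetical direct call, assuming model, tokenizer, and generate_text are already defined as above.
print(generate_text("Erkläre kurz, was ein Transformer-Modell ist.", "ein Gedicht"))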