ailai / app.py
import gradio as gr
from transformers import AutoModelForCausalLM, pipeline

# Text-generation pipeline: a GPT-2 model fine-tuned on mushroom texts,
# paired with the stock GPT-2 tokenizer.
shroom_generator = pipeline(
    "text-generation",
    model=AutoModelForCausalLM.from_pretrained("azaninello/gpt2-finetuned-shrooms"),
    tokenizer="gpt2",
)

def generator(scrivi_un_titolo=""):  # "scrivi un titolo" is Italian for "write a title"
    # Wrap the headline in the special tokens used during fine-tuning,
    # then generate up to 420 tokens of continuation.
    shroom_result = shroom_generator(f"<|HEADLINE|>{scrivi_un_titolo}<|TEXT|>", max_length=420)
    return shroom_result[0]["generated_text"]

iface = gr.Interface(fn=generator, inputs="text", outputs="text")
iface.launch()