# Ai-Guru / app.py
import gradio as gr
from transformers import GPT2LMHeadModel, GPT2Tokenizer

# Load the fine-tuned GPT-2 model and its tokenizer from the Hugging Face Hub.
model_name = "enzer1992/AI-Guru"
tokenizer = GPT2Tokenizer.from_pretrained(model_name)
model = GPT2LMHeadModel.from_pretrained(model_name)
def generate_text(prompt):
    # Encode the prompt, generate up to 100 tokens, and decode a single sequence.
    inputs = tokenizer.encode(prompt, return_tensors="pt")
    outputs = model.generate(inputs, max_length=100, num_return_sequences=1)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
# Simple text-in / text-out Gradio UI around the generator.
interface = gr.Interface(fn=generate_text, inputs="text", outputs="text")
interface.launch()
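
# Optional quick check (a sketch, not part of the original Space): calling
# generate_text directly confirms the model loads and produces output before
# serving the UI. The prompt string below is an arbitrary example.
# print(generate_text("Explain overfitting in simple terms."))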