nlp-lstm-team / app.py
import streamlit as st
from transformers import GPT2LMHeadModel, GPT2Tokenizer
import torch
# Run on GPU when available, otherwise fall back to CPU
DEVICE = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

# Path to the fine-tuned GPT-2 checkpoint and its tokenizer
model_path = "/zhvanetsky_model_1"
tokenizer = GPT2Tokenizer.from_pretrained(model_path)
model = GPT2LMHeadModel.from_pretrained(model_path).to(DEVICE)
def generate_text(input_text):
    """Generate a continuation of the input text with the fine-tuned GPT-2 model."""
    model.eval()
    input_ids = tokenizer.encode(input_text, return_tensors="pt").to(DEVICE)
    with torch.no_grad():
        out = model.generate(
            input_ids,
            do_sample=True,
            num_beams=10,
            temperature=2.2,
            top_p=0.85,
            top_k=500,
            max_length=100,
            no_repeat_ngram_size=3,
            num_return_sequences=3,
        )
    # Three sequences are generated, but only the first is returned to the UI
    return tokenizer.decode(out[0], skip_special_tokens=True)
# Streamlit UI
st.title("GPT-2 Text Generator")
user_input = st.text_area("Input Text", "Введите ваш текст")  # default placeholder: "Enter your text"
if st.button("Generate"):
    generated_output = generate_text(user_input)
    st.text_area("Generated Text", generated_output)
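
# A minimal way to try this locally, assuming Streamlit is installed and the
# directory referenced by model_path contains the fine-tuned weights:
#   streamlit run app.py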