# -*- coding: utf-8 -*-
import streamlit as st
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

st.set_page_config(page_title='KoQuillBot', layout='wide', initial_sidebar_state='expanded')

# A single KE-T5 tokenizer is shared by both translation directions.
tokenizer = AutoTokenizer.from_pretrained("QuoQA-NLP/KE-T5-Ko2En-Base")
ko2en_model = AutoModelForSeq2SeqLM.from_pretrained("QuoQA-NLP/KE-T5-Ko2En-Base")
en2ko_model = AutoModelForSeq2SeqLM.from_pretrained("QuoQA-NLP/KE-T5-En2Ko-Base")

st.title("🤖 KoQuillBot")

# "바꾸고 싶은 문장을 입력하세요:" = "Enter the sentence you want to rephrase:"
src_text = st.text_area("바꾸고 싶은 문장을 입력하세요:", height=None, max_chars=None, key=None, help="Enter your text here")

backtranslated = ""
# "문장 변환" = "Transform sentence"
if st.button('문장 변환'):
    if src_text == "":
        st.warning('Please **enter text** for translation')
    else:
        # Korean -> English translation
        translated_ids = ko2en_model.generate(
            **tokenizer([src_text], return_tensors="pt", padding=True, truncation=True, max_length=64),
            max_length=64,
            num_beams=5,
            repetition_penalty=1.3,
            no_repeat_ngram_size=3,
            num_return_sequences=1,
        )
        translated = tokenizer.batch_decode(translated_ids, skip_special_tokens=True)[0]

        # English -> Korean back-translation yields the paraphrased sentence
        backtranslated_ids = en2ko_model.generate(
            **tokenizer([translated], return_tensors="pt", padding=True, truncation=True, max_length=64),
            max_length=64,
            num_beams=5,
            repetition_penalty=1.3,
            no_repeat_ngram_size=3,
            num_return_sequences=1,
        )
        backtranslated = tokenizer.batch_decode(backtranslated_ids, skip_special_tokens=True)[0]

print(backtranslated)
st.write(backtranslated)