Stefan Dumitrescu committed • Commit 76d0859 • 1 parent: a654e97
Update
app.py
CHANGED
@@ -3,16 +3,27 @@ import streamlit as st
 
 from transformers import AutoTokenizer, AutoModelWithLMHead
 
-
+###################
+# global variables
+
+
+###################
+# page configs and functions
+
+st.set_page_config(
+    page_title="Romanian Text Generator",
+    page_icon="🇷🇴",
+)
+
+tokenizer = AutoTokenizer.from_pretrained("dumitrescustefan/gpt-neo-romanian-780m")
 
 
 @st.cache
 def load_model(model_name):
-    model = AutoModelWithLMHead.from_pretrained("
+    model = AutoModelWithLMHead.from_pretrained("dumitrescustefan/gpt-neo-romanian-780m")
     return model
 
-
-model = load_model("gpt2-base")
+model = load_model("dumitrescustefan/gpt-neo-romanian-780m")
 
 
 def infer(input_ids, max_length, temperature, top_k, top_p):
@@ -32,7 +43,7 @@ def infer(input_ids, max_length, temperature, top_k, top_p):
 default_value = "See how a modern neural network auto-completes your text 🤗 This site, built by the Hugging Face team, lets you write a whole document directly from your browser, and you can trigger the Transformer anywhere using the Tab key. It's like having a smart machine that completes your thoughts 😉 Get started by typing a custom snippet, check out the repository, or try one of the examples. Have fun!"
 
 # prompts
-st.title("Write
+st.title("Write")
 st.write(
     "The almighty king of text generation, GPT-2 comes in four available sizes, only three of which have been publicly made available. Feared for its fake news generation capabilities, it currently stands as the most syntactically coherent model. A direct successor to the original GPT, it reinforces the already established pre-training/fine-tuning killer duo. From the paper: Language Models are Unsupervised Multitask Learners by Alec Radford, Jeffrey Wu, Rewon Child, David Luan, Dario Amodei and Ilya Sutskever.")
 
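A note on the cached loader: as committed, load_model accepts a model_name argument but ignores it and hard-codes the checkpoint, and plain @st.cache can fail to hash a torch model. A minimal sketch of the same loader with the argument actually used, plus the usual allow_output_mutation workaround; this is a suggested variant, not what the commit does:

import streamlit as st
from transformers import AutoModelWithLMHead

@st.cache(allow_output_mutation=True)  # torch models are not hashable by st.cache
def load_model(model_name):
    # Use the argument instead of repeating the checkpoint string
    return AutoModelWithLMHead.from_pretrained(model_name)

model = load_model("dumitrescustefan/gpt-neo-romanian-780m")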
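The hunk headers reference an infer(input_ids, max_length, temperature, top_k, top_p) helper whose body falls outside the diff context. A minimal, self-contained sketch of what such a helper typically looks like, assuming the standard transformers generate() sampling API; the prompt and sampling values below are illustrative assumptions, not taken from the commit:

from transformers import AutoTokenizer, AutoModelWithLMHead

tokenizer = AutoTokenizer.from_pretrained("dumitrescustefan/gpt-neo-romanian-780m")
model = AutoModelWithLMHead.from_pretrained("dumitrescustefan/gpt-neo-romanian-780m")

def infer(input_ids, max_length, temperature, top_k, top_p):
    # Sample a continuation; do_sample=True enables temperature/top-k/top-p sampling
    return model.generate(
        input_ids=input_ids,
        max_length=max_length,
        temperature=temperature,
        top_k=top_k,
        top_p=top_p,
        do_sample=True,
    )

# Illustrative usage (parameter values are assumptions):
prompt = "România este"
input_ids = tokenizer(prompt, return_tensors="pt").input_ids
outputs = infer(input_ids, max_length=60, temperature=0.9, top_k=50, top_p=0.95)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))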