import sys
import time
import os
import streamlit as st
import concurrent.futures
from random import randint
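
# Story AI: repeatedly extends a user-supplied prompt with the openai-gpt model,
# typing the result to the console, and sets up a minimal Streamlit front end.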

# Install the heavy dependencies only once per session. Guard with a membership
# check so the flag is not reset to True on every Streamlit rerun, and pass plain
# shell commands to os.system (the notebook-style "!" prefix is not valid here).
if 'new' not in st.session_state:
    st.session_state['new'] = True
if st.session_state.new:
    os.system('pip install torch==1.10.2+cu113 torchvision==0.11.3+cu113 torchaudio===0.10.2+cu113 -f https://download.pytorch.org/whl/cu113/torch_stable.html')
    os.system('pip install transformers')
    st.session_state.new = False

from transformers import pipeline, set_seed

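# Build the text-generation pipeline once at start-up; the openai-gpt weights are
# downloaded from the Hugging Face Hub on the first run.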
generator = pipeline('text-generation', model='openai-gpt')

def generate(initial_text, length=10, return_sequences=1):
    """Generate a continuation of initial_text; a fresh random seed keeps calls varied."""
    set_seed(randint(1, 1000))
    result = generator(initial_text, max_length=length, num_return_sequences=return_sequences)
    return result[0]["generated_text"]

def slice(text, max_length=10):
    """Return the last max_length characters, used as the prompt for the next pass."""
    return text[-max_length:]

def type_text(text):
    """Print text character by character for a typewriter effect."""
    for letter in text:
        sys.stdout.write(letter)
        sys.stdout.flush()  # flush so each character appears immediately
        time.sleep(0)       # increase the delay to slow the typing effect

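# Console entry point: prompt the user, then stream a generated story to stdout.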
if __name__ == "__main__":
    text = input("Enter something to begin with... ")
    print(".\n.\n.\nGenerating\n.\n.\n.")
    
    # Extend the story 50 times: each pass generates a short continuation, keeps the
    # last 10 characters as the next prompt, and prints the rest; the held-back tail
    # reappears at the start of the next generation, so the output stays continuous.
    for _ in range(50):
        result = generate(text)
        text = slice(result)
        with concurrent.futures.ThreadPoolExecutor() as executor:
            # the with-block waits for type_text to finish, keeping output in order
            executor.submit(type_text, result.replace(text, ""))

    # Streamlit front end
    st.title("Story AI")
    st.markdown("<h1 style='text-align: center; color: white;'>I can generate interesting stories</h1>", unsafe_allow_html=True)
    st.markdown('')
    st.markdown('')