import streamlit as st
import torch
from transformers import AutoTokenizer, AutoModelWithLMHead, AutoModelForCausalLM
st.title('GPT2: To see all prompt outlines: https://huggingface.co/BigSalmon/InformalToFormalLincoln46')

#device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Despite the label, this slider sets how many top-scoring next-token candidates are displayed.
number_of_outputs = st.sidebar.slider("Number of Outputs", 50, 350)
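# Note: wrapping this loader in a cache decorator (st.cache_resource in newer
# Streamlit, or st.cache(allow_output_mutation=True) in older releases) would
# keep the 6B checkpoint from being reloaded on every rerun of the script.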
def get_model():
    # Checkpoints this Space has cycled through, kept for reference:
    #model = AutoModelWithLMHead.from_pretrained("BigSalmon/GPTNeo350MInformalToFormalLincoln6")
    #model = AutoModelWithLMHead.from_pretrained("BigSalmon/GPTNeo350MInformalToFormalLincoln5")
    #model = AutoModelWithLMHead.from_pretrained("BigSalmon/GPTNeo350MInformalToFormalLincoln4")
    #model = AutoModelWithLMHead.from_pretrained("BigSalmon/GPTNeo350MInformalToFormalLincoln3")
    #model = AutoModelWithLMHead.from_pretrained("BigSalmon/GPTNeo350MInformalToFormalLincoln2")
    #model = AutoModelWithLMHead.from_pretrained("BigSalmon/GPTNeo350MInformalToFormalLincoln")
    #model = AutoModelWithLMHead.from_pretrained("BigSalmon/InformalToFormalLincoln24")
    #model = AutoModelWithLMHead.from_pretrained("BigSalmon/InformalToFormalLincoln25")
    #model = AutoModelWithLMHead.from_pretrained("BigSalmon/InformalToFormalLincoln26")
    #model = AutoModelWithLMHead.from_pretrained("BigSalmon/InformalToFormalLincoln27")
    #model = AutoModelWithLMHead.from_pretrained("BigSalmon/InformalToFormalLincoln28")
    #model = AutoModelWithLMHead.from_pretrained("BigSalmon/InformalToFormalLincoln29")
    #model = AutoModelForCausalLM.from_pretrained("BigSalmon/InformalToFormalLincoln30")
    #model = AutoModelForCausalLM.from_pretrained("BigSalmon/InformalToFormalLincoln31")
    #model = AutoModelForCausalLM.from_pretrained("BigSalmon/InformalToFormalLincoln32")
    #model = AutoModelForCausalLM.from_pretrained("BigSalmon/InformalToFormalLincoln33")
    #model = AutoModelForCausalLM.from_pretrained("BigSalmon/InformalToFormalLincoln34")
    #model = AutoModelForCausalLM.from_pretrained("BigSalmon/InformalToFormalLincoln35")
    #model = AutoModelForCausalLM.from_pretrained("BigSalmon/InformalToFormalLincoln39")
    #model = AutoModelForCausalLM.from_pretrained("BigSalmon/InformalToFormalLincoln40")
    #model = AutoModelForCausalLM.from_pretrained("BigSalmon/InformalToFormalLincoln44")
    #model = AutoModelForCausalLM.from_pretrained("BigSalmon/InformalToFormalLincoln46")
    #model = AutoModelForCausalLM.from_pretrained("BigSalmon/InformalToFormalLincoln52")
    #model = AutoModelForCausalLM.from_pretrained("BigSalmon/Points4")
    # Currently active checkpoint:
    tokenizer = AutoTokenizer.from_pretrained("hgw3lss/gpt-j-6B-Buckland")
    model = AutoModelForCausalLM.from_pretrained("hgw3lss/gpt-j-6B-Buckland")
    return model, tokenizer

model, tokenizer = get_model()
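# hgw3lss/gpt-j-6B-Buckland is a GPT-J-6B checkpoint, so a float32 load needs
# roughly 24 GB of memory. If that is too much for the host, a half-precision
# load is one option (a sketch; the call above loads in full precision):
#
#     model = AutoModelForCausalLM.from_pretrained(
#         "hgw3lss/gpt-j-6B-Buckland", torch_dtype=torch.float16
#     )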
g = """informal english: garage band has made people who know nothing about music good at creating music. | |
Translated into the Style of Abraham Lincoln: garage band ( offers the uninitiated in music the ability to produce professional-quality compositions / catapults those for whom music is an uncharted art the ability the realize masterpieces / stimulates music novice's competency to yield sublime arrangements / begets individuals of rudimentary musical talent the proficiency to fashion elaborate suites ). | |
informal english: chrome extensions can make doing regular tasks much easier to get done. | |
*** | |
Translated into the Style of Abraham Lincoln: chrome extensions ( yield the boon of time-saving convenience / ( expedite the ability to / unlock the means to more readily ) accomplish everyday tasks / turbocharges the velocity with which one can conduct their obligations ). | |
informal english: broadband is finally expanding to rural areas, a great development that will thrust them into modern life. | |
Translated into the Style of Abraham Lincoln: broadband is ( ( finally / at last / after years of delay ) arriving in remote locations / springing to life in far-flung outposts / inching into even the most backwater corners of the nation ) that will leap-frog them into the twenty-first century. | |
*** | |
informal english: google translate has made talking to people who do not share your language easier. | |
Translated into the Style of Abraham Lincoln: google translate ( imparts communicability to individuals whose native tongue differs / mitigates the trials of communication across linguistic barriers / hastens the bridging of semantic boundaries / mollifies the complexity of multilingual communication / avails itself to the internationalization of discussion / flexes its muscles to abet intercultural conversation / calms the tides of linguistic divergence ). | |
*** | |
informal english: corn fields are all across illinois, visible once you leave chicago. | |
Translated into the Style of Abraham Lincoln: corn fields ( permeate illinois / span the state of illinois / ( occupy / persist in ) all corners of illinois / line the horizon of illinois / envelop the landscape of illinois ), manifesting themselves visibly as one ventures beyond chicago. | |
*** | |
informal english: """ | |
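# The string above is a few-shot prompt: each example pairs an "informal english:"
# sentence with a "Translated into the Style of Abraham Lincoln:" rewrite (slashes
# mark alternative phrasings), examples are separated by "***", and the trailing
# "informal english:" is where the user types their own sentence in the text area below.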
with st.form(key='my_form'):
    prompt = st.text_area(label='Enter sentence', value=g)
    submit_button = st.form_submit_button(label='Submit')
    if submit_button:
        with torch.no_grad():
            # Encode the prompt and run a single forward pass to get next-token logits.
            text = tokenizer.encode(prompt)
            myinput, past_key_values = torch.tensor([text]), None
            #myinput = myinput.to(device)  # uncomment along with the device line above to run on GPU
            logits, past_key_values = model(myinput, past_key_values=past_key_values, return_dict=False)
            # Keep only the logits for the last position and rank the candidate tokens.
            logits = logits[0, -1]
            probabilities = torch.nn.functional.softmax(logits, dim=-1)
            best_logits, best_indices = logits.topk(number_of_outputs)
            best_words = [tokenizer.decode([idx.item()]) for idx in best_indices]
            text.append(best_indices[0].item())
            best_probabilities = probabilities[best_indices].tolist()
            st.write(best_words)
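            # Two possible extensions, sketched as comments (they assume the variables computed above):
            # show each candidate token next to its probability,
            #     st.write(dict(zip(best_words, best_probabilities)))
            # or sample a full continuation instead of ranking single next tokens,
            #     output_ids = model.generate(myinput, max_new_tokens=50, do_sample=True, top_p=0.9)
            #     st.write(tokenizer.decode(output_ids[0], skip_special_tokens=True))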