import streamlit as st
from transformers import pipeline


# Load the text-generation pipeline once and cache it across reruns.
# "gpt2" can be replaced with another model or a locally fine-tuned checkpoint.
@st.cache_resource
def load_model():
    return pipeline("text-generation", model="gpt2")


llm = load_model()

# Default output so the text area renders before the first generation.
output_text = ""

# Set up Streamlit columns for layout
col1, col2 = st.columns(2)

with col1:
    # User input box for text input
    user_input = st.text_input("Enter your text:", "")

    # Static backend text combined with the user input
    backend_text = "Predefined text: "
    combined_text = backend_text + user_input

    # Button to trigger LLM generation
    if st.button("Generate"):
        if user_input.strip():  # Ensure the input is not empty
            with st.spinner("Generating response..."):
                # Generate a response from the LLM with some constraints
                response = llm(combined_text, max_length=100, num_return_sequences=1)
                # Extract the generated text from the pipeline output
                output_text = response[0]["generated_text"]
        else:
            output_text = "Please provide some input text."

with col2:
    # Display the output in a text area
    st.text_area("Output:", output_text, height=200, key="output_text")

    # Copy button (uses Streamlit Components to trigger copying)
    copy_script = """
    """
    # Add the script to the page
    st.markdown(copy_script, unsafe_allow_html=True)

    # Button to copy the output text
    if st.button("Copy Output"):
        # Display the output text in a way accessible to JS
        st.write(f'', unsafe_allow_html=True)
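
# A minimal sketch of one way the "Copy Output" button could actually copy text to
# the clipboard, assuming an embedded streamlit.components.v1 HTML snippet is
# acceptable here. The helper name copy_button is illustrative, not part of the
# original app.
import json
import streamlit.components.v1 as components


def copy_button(text: str, label: str = "Copy Output") -> None:
    """Render a small HTML button that copies `text` to the clipboard on click."""
    # json.dumps escapes the text into a valid JavaScript string literal.
    components.html(
        f"""
        <script>
            const textToCopy = {json.dumps(text)};
        </script>
        <button onclick="navigator.clipboard.writeText(textToCopy)">{label}</button>
        """,
        height=45,
    )

# Example usage inside the col2 block, in place of the st.button("Copy Output") branch:
#     copy_button(output_text)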