"""Streamlit app that fetches famous quotes from Hollywood movies.

Builds a LangChain ``LLMChain`` backed by a HuggingFace Inference
Endpoint (Mixtral-8x7B-Instruct) with a few-shot chat prompt, then
renders a simple form: the user types a movie name and accumulated
quotes are shown above the input.
"""

# packages
import streamlit as st
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
from langchain.llms import (
    HuggingFacePipeline,
    HuggingFaceEndpoint,
)
from langchain.prompts.chat import (
    ChatPromptTemplate,
    SystemMessagePromptTemplate,
    HumanMessagePromptTemplate,
)

# initializing variables
# Model served through the HuggingFace Inference API.
model_id = "mistralai/Mixtral-8x7B-Instruct-v0.1"

# Few-shot examples injected into the system prompt.  Example 3
# deliberately teaches the model to refuse when it does not know
# the movie, reinforcing the "don't hallucinate" instruction.
few_shot_example = '''
Example 1:
User: get me best quotes from the movie The Wizard of Oz
Assistant: sure!
1. Toto, I've a feeling we're not in Kansas anymore.
2. And remember, my sentimental friend, that a heart is not judged by how much *you* love; but by how much you are loved by others.
3. Pay no attention to that man behind the curtain.
Example 2:
User: get the best quotes from the movie pursuit of happiness
Assistant: sure!
1. Don't ever let someone tell you that you can't do something. Not even me. You got a dream, you gotta protect it. When people can't do something themselves, they're gonna tell you that you can't do it. You want something, go get it. Period.
2. Others may question your credentials, your papers, your degrees. But what is inside you no one can take from you or tarnish"
Example 3:
User: get the best quotes from the movie blha blah
Assistant: Sorry, I dont know
Now, continue:
'''


# functions
def load_prompt_chain():
    """Build the LLMChain used to answer movie-quote requests.

    Returns:
        LLMChain wired to the HuggingFace endpoint with a
        system + human chat prompt, or ``None`` when construction
        fails (the error is surfaced in the Streamlit UI).
    """
    try:
        # NOTE(review): `max_length` may need to be `max_new_tokens`
        # on newer langchain/huggingface_hub versions — confirm against
        # the installed HuggingFaceEndpoint signature.
        llm = HuggingFaceEndpoint(
            repo_id=model_id,
            max_length=2000,
            temperature=0.5,
        )
        # System prompt takes two template variables: {movie_name} and
        # {few_shot_example}; both are supplied at chain.run() time.
        system_message_prompt = SystemMessagePromptTemplate.from_template(
            template='''you are an excellent Assistant AI expert on getting Quotes from hollywood movies, please get the maximum 5 best quotes from the movie {movie_name}, dont assume or hallucinate answers, if you dont know the answer, just say you dont know. 
\n\n {few_shot_example}''',
        )
        speaker_message_prompt = HumanMessagePromptTemplate.from_template(
            'get the best quotes from the movie {movie_name}'
        )
        chat_prompt = ChatPromptTemplate.from_messages([
            system_message_prompt,
            speaker_message_prompt,
        ])
        chain = LLMChain(llm=llm, prompt=chat_prompt)
        return chain
    except Exception as ex:
        # Boundary handler: show the failure in the UI and degrade
        # gracefully — the form below checks for a None chain.
        st.error("Error loading prompt chain.")
        st.error(str(ex))
        return None


chain = load_prompt_chain()

# session states
# movie_name: last submitted movie title; movie_quotes: accumulated
# markdown of every answer so far (appended to, never reset).
if 'movie_name' not in st.session_state:
    st.session_state['movie_name'] = ''
if 'movie_quotes' not in st.session_state:
    st.session_state['movie_quotes'] = ''

# page controls
st.title('best movie quotes.')
with st.form('movie_quotes_form'):
    # Placeholder showing all quotes gathered so far; updated in place
    # after each successful run.
    markdown = st.markdown(
        st.session_state['movie_quotes'],
        unsafe_allow_html=True,
    )
    st.divider()
    st.session_state['movie_name'] = st.text_input(
        label='enter the movie name',
        value=st.session_state['movie_name'],
    )
    submitted = st.form_submit_button(label='get quotes')
    if submitted and chain:
        # Skip the LLM call entirely when the input is blank.
        if not st.session_state['movie_name'].strip():
            st.warning("Please enter a movie name first.")
        else:
            try:
                message = chain.run({
                    'few_shot_example': few_shot_example,
                    'movie_name': st.session_state['movie_name'],
                })
                st.session_state['movie_quotes'] += message
                markdown.write(st.session_state['movie_quotes'])
            except Exception as ex:
                # Boundary handler, consistent with load_prompt_chain():
                # display the message as a string rather than the raw
                # exception object.
                st.error("Error while generating quotes.")
                st.error(str(ex))