import streamlit as st

from log10.load import log10, log10_session
import openai


@st.cache_resource
def init_log10():
    log10(openai)


# Example from: https://python.langchain.com/en/latest/use_cases/question_answering.html
# Download the state_of_the_union.txt here:
# https://raw.githubusercontent.com/hwchase17/langchain/master/docs/modules/state_of_the_union.txt
# This example requires: pip install chromadb

# Load Your Documents
from langchain.document_loaders import TextLoader

# Create Your Index
from langchain.indexes import VectorstoreIndexCreator
from langchain.vectorstores import Chroma
from langchain.embeddings import OpenAIEmbeddings
from langchain.text_splitter import CharacterTextSplitter
from langchain.chat_models import ChatOpenAI


@st.cache_resource
def init_vector_db():
    loader = TextLoader('./state_of_the_union.txt')
    index = VectorstoreIndexCreator(
        vectorstore_cls=Chroma,
        embedding=OpenAIEmbeddings(),
        text_splitter=CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
    ).from_loaders([loader])
    return index


init_log10()
index = init_vector_db()

st.title('State of the Union')

query = st.text_input("Question:", "What did the president say about Ketanji Brown Jackson?")

# answer = index.query_with_sources(query, llm=ChatOpenAI(temperature=0, model_name="gpt-3.5-turbo"))

# To combine all queries into a single log10 session, comment out the two lines
# below and uncomment the answer = ... line above.
with log10_session():
    answer = index.query_with_sources(query, llm=ChatOpenAI(temperature=0, model_name="gpt-3.5-turbo"))

st.write(answer)
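
# --- Running this example (a minimal sketch; the filename app.py, the log10
# package name, and the exact credential setup are assumptions, not part of
# the original snippet) ---
#
#   pip install streamlit openai langchain chromadb log10-io   # log10 package name assumed; check the log10 docs
#   export OPENAI_API_KEY=...                                   # plus log10 credentials per the log10 docs
#   streamlit run app.py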