import os

import gradio as gr
from langchain.chains import RetrievalQA
from langchain.chat_models import ChatOpenAI
from langchain.document_loaders import PyMuPDFLoader
from langchain.embeddings import OpenAIEmbeddings
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import Chroma

# NOTE(review): blanking CURL_CA_BUNDLE disables TLS certificate verification
# for curl-backed HTTP clients — confirm this workaround is still required.
os.environ['CURL_CA_BUNDLE'] = ''

# Running transcript of the chat, echoed back to the UI after every query.
conversation_history = ""


def main(api_key, pdf_path, user_input):
    """Answer a query against an uploaded PDF and return the full transcript.

    The PDF is chunked, embedded, and indexed into a persistent Chroma store
    on every call, then queried with a RetrievalQA chain backed by GPT-4.

    Args:
        api_key: OpenAI API key used for both embeddings and the chat model.
        pdf_path: Gradio file-upload object; ``.name`` is the temp-file path.
        user_input: The user's latest question.

    Returns:
        The accumulated conversation history, or an error message string if
        the LLM call fails.
    """
    global conversation_history  # mutated so the transcript survives calls
    os.environ["OPENAI_API_KEY"] = api_key

    persist_directory = "./storage"

    # Load the PDF and split it into overlapping chunks for retrieval.
    documents = PyMuPDFLoader(pdf_path.name).load()
    splitter = RecursiveCharacterTextSplitter(chunk_size=512, chunk_overlap=10)
    texts = splitter.split_documents(documents)

    # Embed the chunks and persist the vector index to disk.
    vectordb = Chroma.from_documents(
        documents=texts,
        embedding=OpenAIEmbeddings(),
        persist_directory=persist_directory,
    )
    vectordb.persist()

    qa = RetrievalQA.from_chain_type(
        llm=ChatOpenAI(model_name='gpt-4'),
        chain_type="stuff",
        retriever=vectordb.as_retriever(search_kwargs={"k": 3}),
    )

    # Record the question, then prompt with the whole transcript for context.
    conversation_history += f"User: {user_input}\n"
    query = f"{conversation_history}###Prompt {user_input}"

    # Keep the try body minimal: only the LLM call can meaningfully raise here.
    try:
        llm_response = qa(query)
    except Exception as err:
        return f'Exception occurred. Please try again: {str(err)}'

    conversation_history += f"Model: {llm_response['result']}\n"
    return conversation_history


iface = gr.Interface(
    fn=main,
    inputs=[
        # Gradio 3+: the gr.inputs.* namespace was removed; components live
        # at the top level. `show_submit_button` is not an Interface kwarg
        # and was dropped (Interface renders a submit button by default).
        gr.Textbox(label="OpenAI API Key", type="password"),
        gr.File(label="Upload PDF"),
        gr.Textbox(label="Enter Query"),
    ],
    outputs="text",
    live=False,
    description="Enter your OpenAI API Key, upload a PDF, and enter a query to get a response.",
)

iface.launch(share=True)