# Galvanotechnik / app.py
import os

import openai

# Silence the HuggingFace tokenizers fork warning and set the OpenAI key;
# a missing OPENAI_API_KEY raises a KeyError at startup instead of at the
# first request.
os.environ["TOKENIZERS_PARALLELISM"] = "false"
openai.api_key = os.environ["OPENAI_API_KEY"]

# Module-level handle to the conversation agent built by create_agent().
agent = None

def create_agent():
    """Build a GPT-4o conversation chain with summarising memory."""
    from langchain.chat_models import ChatOpenAI
    from langchain.chains.conversation.memory import ConversationSummaryBufferMemory
    from langchain.chains import ConversationChain

    global agent
    llm = ChatOpenAI(model_name="gpt-4o")
    # Older turns are summarised once the buffer exceeds ~1000 tokens, keeping
    # the running conversation within the model's context window.
    memory = ConversationSummaryBufferMemory(llm=llm, max_token_limit=1000)
    agent = ConversationChain(llm=llm, memory=memory, verbose=True)
    return "Successful!"

def formatted_response(docs, question, response, state):
    """Append the answer plus a list of source documents to the chat history."""
    formatted_output = response + "\n\nSources:"
    for doc in docs:
        source_info = doc.metadata.get("source", "Unknown source")
        page_info = doc.metadata.get("page", None)
        doc_name = source_info.split("/")[-1].strip()
        if page_info is not None:
            formatted_output += f"\n{doc_name}\tpage no {page_info}"
        else:
            formatted_output += f"\n{doc_name}"
    state.append((question, formatted_output))
    # Gradio needs the chat history both for the Chatbot component and the State.
    return state, state

def search_docs(prompt, question, state):
    """Answer a question using chunks retrieved from the local FAISS index."""
    from langchain.embeddings.openai import OpenAIEmbeddings
    from langchain.vectorstores import FAISS
    from langchain.callbacks import get_openai_callback

    global agent
    state = state or []

    # Load the prebuilt vector store; allow_dangerous_deserialization is needed
    # because LangChain pickles part of the FAISS index on disk.
    embeddings = OpenAIEmbeddings()
    docs_db = FAISS.load_local(
        "/home/user/app/docs_db/", embeddings, allow_dangerous_deserialization=True
    )
    docs = docs_db.similarity_search(question)

    # Combine the custom prompt, the question and the retrieved chunks into a
    # single input for the conversation chain.
    full_prompt = prompt + "\n\n" + question + "\n\n" + str(docs)

    with get_openai_callback() as cb:
        response = agent.predict(input=full_prompt)
        print(cb)  # log token usage and estimated cost for this call

    return formatted_response(docs, question, response, state)
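
# The commented-out "Upload" and "Process" buttons below reference save_docs and
# process_docs, which are not defined in this file. The following is a minimal,
# hypothetical sketch of what they might look like, assuming PDF uploads, a
# /home/user/app/docs/ staging directory, and the same docs_db path used in
# search_docs; none of these details are confirmed by the original app.
#
# def save_docs(docs):
#     """Copy uploaded files into a local staging directory."""
#     import shutil
#
#     os.makedirs("/home/user/app/docs/", exist_ok=True)
#     for doc in docs:
#         shutil.copy(doc.name, "/home/user/app/docs/")
#     return "Successful!"
#
#
# def process_docs():
#     """Split the staged PDFs and rebuild the FAISS index on disk."""
#     from langchain.document_loaders import PyPDFDirectoryLoader
#     from langchain.embeddings.openai import OpenAIEmbeddings
#     from langchain.text_splitter import RecursiveCharacterTextSplitter
#     from langchain.vectorstores import FAISS
#
#     documents = PyPDFDirectoryLoader("/home/user/app/docs/").load()
#     chunks = RecursiveCharacterTextSplitter(
#         chunk_size=1000, chunk_overlap=100
#     ).split_documents(documents)
#     FAISS.from_documents(chunks, OpenAIEmbeddings()).save_local("/home/user/app/docs_db/")
#     return "Successful!"
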
import gradio as gr
css = """
.col{
max-width: 75%;
margin: 0 auto;
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
}
"""
with gr.Blocks(css=css) as demo:
    gr.Markdown("## <center>All in One Document Chatting App</center>")

    with gr.Tab("Chat With Your Documents"):
        with gr.Column(elem_classes="col"):
            with gr.Tab("Upload and Process Documents"):
                with gr.Column():
                    # Upload/processing widgets are disabled; the app ships with
                    # a prebuilt index and only needs the agent to be created.
                    # docs_upload_input = gr.Files(label="Upload File(s)")
                    # docs_upload_button = gr.Button("Upload")
                    # docs_upload_output = gr.Textbox(label="Output")
                    # docs_process_button = gr.Button("Process")
                    # docs_process_output = gr.Textbox(label="Output")
                    create_agent_button = gr.Button("Create Agent")
                    create_agent_output = gr.Textbox(label="Output")
                    # gr.ClearButton([docs_upload_input, docs_upload_output, docs_process_output, create_agent_output])
                    gr.ClearButton([create_agent_output])

            with gr.Tab("Query Documents"):
                with gr.Column():
                    docs_prompt_input = gr.Textbox(label="Custom Prompt")
                    docs_chatbot = gr.Chatbot(label="Chats")
                    docs_state = gr.State()
                    docs_search_input = gr.Textbox(label="Question")
                    docs_search_button = gr.Button("Search")
                    gr.ClearButton([docs_prompt_input, docs_search_input])

    ########################################################################################################
    # docs_upload_button.click(save_docs, inputs=docs_upload_input, outputs=docs_upload_output)
    # docs_process_button.click(process_docs, inputs=None, outputs=docs_process_output)

    create_agent_button.click(create_agent, inputs=None, outputs=create_agent_output)
    docs_search_button.click(
        search_docs,
        inputs=[docs_prompt_input, docs_search_input, docs_state],
        outputs=[docs_chatbot, docs_state],
    )
    ########################################################################################################

demo.queue()
demo.launch()