petrojm committed
Commit aa94ed8
Parent: 5ab5b15

changes app

Files changed (1): app.py (+11, -28)
app.py CHANGED
@@ -14,22 +14,8 @@ from utils.vectordb.vector_db import VectorDb
 CONFIG_PATH = os.path.join(current_dir,'config.yaml')
 PERSIST_DIRECTORY = os.path.join(current_dir,f"data/my-vector-db") # changed to current_dir
 
-#class ChatState:
-#    def __init__(self):
-#        self.conversation = None
-#        self.chat_history = []
-#        self.show_sources = True
-#        self.sources_history = []
-#        self.vectorstore = None
-#        self.input_disabled = True
-#        self.document_retrieval = None
-
 chat_history = gr.State()
 chat_history = []
-vectorstore = gr.State()
-document_retrieval = gr.State()
-
-document_retrieval = DocumentRetrieval()
 
 def handle_userinput(user_question, conversation):
     if user_question:
@@ -48,31 +34,25 @@ def handle_userinput(user_question, conversation):
         return "An error occurred", ""
     #return chat_history, "" #, state.sources_history
 
-def process_documents(files, conversation, save_location=None):
+def process_documents(files, document_retrieval, vectorstore, conversation, save_location=None):
     try:
-        #for doc in files:
+        document_retrieval = DocumentRetrieval()
        _, _, text_chunks = parse_doc_universal(doc=files)
        print(text_chunks)
-        #text_chunks = chat_state.document_retrieval.parse_doc(files)
        embeddings = document_retrieval.load_embedding_model()
        collection_name = 'ekr_default_collection' if not config['prod_mode'] else None
        vectorstore = document_retrieval.create_vector_store(text_chunks, embeddings, output_db=save_location, collection_name=collection_name)
-        #vectorstore = vectorstore
        document_retrieval.init_retriever(vectorstore)
        conversation = document_retrieval.get_qa_retrieval_chain()
        #input_disabled = False
-        return conversation, "Complete! You can now ask questions."
+        return conversation, vectorstore, document_retrieval, "Complete! You can now ask questions."
     except Exception as e:
-        return conversation, f"An error occurred while processing: {str(e)}"
+        return conversation, vectorstore, document_retrieval, f"An error occurred while processing: {str(e)}"
 
-def reset_conversation():
+def reset_conversation(chat_history):
     chat_history = []
-    #chat_state.sources_history = []
     return chat_history, ""
 
-def show_selection(model):
-    return f"You selected: {model}"
-
 # Read config file
 with open(CONFIG_PATH, 'r') as yaml_file:
     config = yaml.safe_load(yaml_file)
@@ -87,7 +67,9 @@ caution_text = """⚠️ Note: depending on the size of your document, this coul
 """
 
 with gr.Blocks() as demo:
+    vectorstore = gr.State()
     conversation = gr.State()
+    document_retrieval = gr.State()
 
     gr.Markdown("# Enterprise Knowledge Retriever",
                 elem_id="title")
@@ -108,7 +90,7 @@ with gr.Blocks() as demo:
     gr.Markdown(caution_text)
 
     # Preprocessing events
-    process_btn.click(process_documents, inputs=[docs, conversation], outputs=[conversation, setup_output])
+    process_btn.click(process_documents, inputs=[docs, document_retrieval, vectorstore, conversation], outputs=[conversation, vectorstore, document_retrieval, setup_output], concurrency_limit=10)
     #process_save_btn.click(process_documents, inputs=[file_upload, save_location], outputs=setup_output)
     #load_db_btn.click(load_existing_db, inputs=[db_path], outputs=setup_output)
 
@@ -122,8 +104,9 @@ with gr.Blocks() as demo:
 
     # Chatbot events
     #msg.submit(handle_userinput, inputs=[msg], outputs=[chatbot, sources_output])
-    msg.submit(handle_userinput, inputs=[msg, conversation], outputs=[chatbot, msg])
-    clear_btn.click(reset_conversation, outputs=[chatbot,msg])
+    msg.submit(handle_userinput, inputs=[msg, conversation], outputs=[chatbot, msg], queue=False)
+    clear_btn.click(lambda: [None, ""], inputs=None, outputs=[chatbot, msg], queue=False)
+    #clear_btn.click(reset_conversation, inputs=[], outputs=[chatbot,msg])
     #show_sources.change(lambda x: gr.update(visible=x), show_sources, sources_output)
 
 if __name__ == "__main__":
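
Note: the substance of this commit is moving vectorstore and document_retrieval out of module-level globals and into per-session gr.State() objects that are threaded through the event handlers, so each Gradio session keeps its own retriever and store rather than sharing one process-wide. Below is a minimal sketch of that pattern using only stock Gradio; the handler and component names (build_index, retrieval, store) are illustrative stand-ins, not the real process_documents / DocumentRetrieval pipeline from app.py.

# Minimal sketch: per-session state threaded through a click handler.
# build_index is a hypothetical stand-in for process_documents.
import gradio as gr

def build_index(files, retrieval, store):
    # app.py constructs DocumentRetrieval() and the vector store here;
    # this stub just records that the work happened for this session.
    retrieval = retrieval or {"ready": True}   # stand-in for DocumentRetrieval()
    store = {"files": files}                   # stand-in for the vector store
    return retrieval, store, "Complete! You can now ask questions."

with gr.Blocks() as demo:
    retrieval = gr.State()        # per-session, replaces a module-level global
    store = gr.State()
    docs = gr.File(file_count="multiple", label="Documents")
    setup_output = gr.Textbox(label="Status")
    process_btn = gr.Button("Process")

    # The state objects appear in both inputs and outputs so the session
    # keeps its own copies between events, mirroring the new
    # process_btn.click wiring in this commit.
    process_btn.click(
        build_index,
        inputs=[docs, retrieval, store],
        outputs=[retrieval, store, setup_output],
    )

if __name__ == "__main__":
    demo.launch()

Passing the state through both inputs and outputs is also what makes the new concurrency_limit=10 on process_btn.click workable: concurrent sessions no longer mutate shared module-level objects.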