timeki committed on
Commit
57a1ed7
β€’
1 Parent(s): 0c4d82b
app.py CHANGED
@@ -89,13 +89,14 @@ share_client = service.get_share_client(file_share_name)
89
 
90
  user_id = create_user_id()
91
 
 
92
 
93
 
94
  # Create vectorstore and retriever
95
  vectorstore = get_pinecone_vectorstore(embeddings_function)
96
  llm = get_llm(provider="openai",max_tokens = 1024,temperature = 0.0)
97
- reranker = get_reranker("large")
98
- agent = make_graph_agent(llm,vectorstore,reranker)
99
 
100
  # agent = make_graph_agent(llm,vectorstore,reranker)
101
  agent = make_graph_agent(llm=llm, vectorstore_ipcc=vectorstore, vectorstore_graphs=vectorstore_graphs, reranker=reranker)
@@ -134,7 +135,6 @@ async def chat(query,history,audience,sources,reports,current_graphs):
134
  docs = []
135
  docs_used = True
136
  docs_html = ""
137
- current_graphs = []
138
  output_query = ""
139
  output_language = ""
140
  output_keywords = ""
@@ -142,7 +142,7 @@ async def chat(query,history,audience,sources,reports,current_graphs):
142
  updates = []
143
  start_streaming = False
144
  figures = '<div class="figures-container"> <p> Go to the "Figures" tab at the top of the page to see full size images </p> </div>'
145
-
146
  steps_display = {
147
  "categorize_intent":("πŸ”„οΈ Analyzing user message",True),
148
  "transform_query":("πŸ”„οΈ Thinking step by step to answer the question",True),
@@ -227,13 +227,11 @@ async def chat(query,history,audience,sources,reports,current_graphs):
227
  categories[category] = []
228
  categories[category].append(graph['embedding'])
229
 
230
- # graphs_html = ""
231
  for category, embeddings in categories.items():
232
- # graphs_html += f"<h3>{category}</h3>"
233
- # current_graphs.append(f"<h3>{category}</h3>")
234
  for embedding in embeddings:
235
- current_graphs.append([embedding, category])
236
- # graphs_html += f"<div>{embedding}</div>"
237
 
238
 
239
  except Exception as e:
@@ -248,7 +246,7 @@ async def chat(query,history,audience,sources,reports,current_graphs):
248
  if event["name"] == "categorize_intent" and event["event"] == "on_chain_start":
249
  print("X")
250
 
251
- yield history,docs_html,output_query,output_language,gallery, figures, current_graphs #,output_query,output_keywords
252
 
253
  except Exception as e:
254
  print(event, "has failed")
@@ -337,7 +335,7 @@ async def chat(query,history,audience,sources,reports,current_graphs):
337
 
338
 
339
 
340
- yield history,docs_html,output_query,output_language,gallery, figures#,output_query,output_keywords
341
 
342
 
343
  def save_feedback(feed: str, user_id):
@@ -423,11 +421,7 @@ def save_graph(saved_graphs_state, embedding, category):
423
  return saved_graphs_state, gr.Button("Graph Saved")
424
 
425
 
426
- with gr.Blocks(title="Climate Q&A", css_paths=os.getcwd()+ "/style.css", theme=theme,elem_id = "main-component") as demo:
427
 
428
- # chat_completed_state = gr.State(0)
429
- # current_graphs = gr.State([])
430
- # saved_graphs = gr.State({})
431
  with gr.Blocks(title="Climate Q&A", css_paths=os.getcwd()+ "/style.css", theme=theme,elem_id = "main-component") as demo:
432
  chat_completed_state = gr.State(0)
433
  current_graphs = gr.State([])
@@ -527,10 +521,11 @@ with gr.Blocks(title="Climate Q&A", css_paths=os.getcwd()+ "/style.css", theme=t
527
  output_language = gr.Textbox(label="Language",show_label = True,elem_id = "language",lines = 1,interactive = False)
528
 
529
 
530
- with gr.Tab("Recommended content", elem_id="tab-recommended_content", id=3) as recommended_content_tab:
531
  # placeholder_message = gr.HTML("<h2>There are no graphs to be displayed at the moment. Try asking another question.</h2>")
532
  graphs_container = gr.HTML("<h2>There are no graphs to be displayed at the moment. Try asking another question.</h2>")
533
  current_graphs.change(lambda x : x, inputs=[current_graphs], outputs=[graphs_container])
 
534
 
535
  # @gr.render(inputs=[current_graphs])
536
  # def display_default_recommended(current_graphs):
 
89
 
90
  user_id = create_user_id()
91
 
92
+ vectorstore_graphs = Chroma(persist_directory="/home/tim/ai4s/climate_qa/climate-question-answering/data/vectorstore_owid", embedding_function=embeddings_function) # TODO make it api call
93
 
94
 
95
  # Create vectorstore and retriever
96
  vectorstore = get_pinecone_vectorstore(embeddings_function)
97
  llm = get_llm(provider="openai",max_tokens = 1024,temperature = 0.0)
98
+ reranker = get_reranker("nano")
99
+ # agent = make_graph_agent(llm,vectorstore,reranker)
100
 
101
  # agent = make_graph_agent(llm,vectorstore,reranker)
102
  agent = make_graph_agent(llm=llm, vectorstore_ipcc=vectorstore, vectorstore_graphs=vectorstore_graphs, reranker=reranker)
 
135
  docs = []
136
  docs_used = True
137
  docs_html = ""
 
138
  output_query = ""
139
  output_language = ""
140
  output_keywords = ""
 
142
  updates = []
143
  start_streaming = False
144
  figures = '<div class="figures-container"> <p> Go to the "Figures" tab at the top of the page to see full size images </p> </div>'
145
+ graphs_html = ""
146
  steps_display = {
147
  "categorize_intent":("πŸ”„οΈ Analyzing user message",True),
148
  "transform_query":("πŸ”„οΈ Thinking step by step to answer the question",True),
 
227
  categories[category] = []
228
  categories[category].append(graph['embedding'])
229
 
230
+
231
  for category, embeddings in categories.items():
232
+ graphs_html += f"<h3>{category}</h3>"
 
233
  for embedding in embeddings:
234
+ graphs_html += f"<div>{embedding}</div>"
 
235
 
236
 
237
  except Exception as e:
 
246
  if event["name"] == "categorize_intent" and event["event"] == "on_chain_start":
247
  print("X")
248
 
249
+ yield history, docs_html, output_query, output_language, gallery, figures, graphs_html #,output_query,output_keywords
250
 
251
  except Exception as e:
252
  print(event, "has failed")
 
335
 
336
 
337
 
338
+ yield history, docs_html, output_query, output_language, gallery, figures, graphs_html#,output_query,output_keywords
339
 
340
 
341
  def save_feedback(feed: str, user_id):
 
421
  return saved_graphs_state, gr.Button("Graph Saved")
422
 
423
 
 
424
 
 
 
 
425
  with gr.Blocks(title="Climate Q&A", css_paths=os.getcwd()+ "/style.css", theme=theme,elem_id = "main-component") as demo:
426
  chat_completed_state = gr.State(0)
427
  current_graphs = gr.State([])
 
521
  output_language = gr.Textbox(label="Language",show_label = True,elem_id = "language",lines = 1,interactive = False)
522
 
523
 
524
+ with gr.Tab("Recommended content", elem_id="tab-recommended_content", id=4) as recommended_content_tab:
525
  # placeholder_message = gr.HTML("<h2>There are no graphs to be displayed at the moment. Try asking another question.</h2>")
526
  graphs_container = gr.HTML("<h2>There are no graphs to be displayed at the moment. Try asking another question.</h2>")
527
  current_graphs.change(lambda x : x, inputs=[current_graphs], outputs=[graphs_container])
528
+ gr.Textbox(current_graphs.value)
529
 
530
  # @gr.render(inputs=[current_graphs])
531
  # def display_default_recommended(current_graphs):
sandbox/20240702 - CQA - Graph Functionality.ipynb CHANGED
The diff for this file is too large to render. See raw diff