fix merge
Changed files:
- app.py (+16 / -12)
- climateqa/event_handler.py (+3 / -1)
- style.css (+5 / -0)
app.py
@@ -142,6 +142,7 @@ async def chat(query,history,audience,sources,reports,current_graphs):
 
     docs = []
     used_figures=[]
+    related_contents = []
     docs_html = ""
     output_query = ""
     output_language = ""
@@ -164,7 +165,7 @@ async def chat(query,history,audience,sources,reports,current_graphs):
             node = event["metadata"]["langgraph_node"]
 
             if event["event"] == "on_chain_end" and event["name"] == "retrieve_documents" :# when documents are retrieved
-                docs, docs_html, history, used_documents = handle_retrieved_documents(event, history, used_documents)
+                docs, docs_html, history, used_documents, related_contents = handle_retrieved_documents(event, history, used_documents)
 
 
             elif event["name"] in steps_display.keys() and event["event"] == "on_chain_start": #display steps
@@ -186,7 +187,7 @@ async def chat(query,history,audience,sources,reports,current_graphs):
             if event["name"] == "categorize_intent" and event["event"] == "on_chain_start":
                 print("X")
 
-            yield history, docs_html, output_query, output_language,
+            yield history, docs_html, output_query, output_language, related_contents , graphs_html, #,output_query,output_keywords
 
     except Exception as e:
         print(event, "has failed")
@@ -214,7 +215,7 @@ async def chat(query,history,audience,sources,reports,current_graphs):
         print(f"Error logging on Azure Blob Storage: {e}")
         raise gr.Error(f"ClimateQ&A Error: {str(e)[:100]} - The error has been noted, try another question and if the error remains, you can contact us :)")
 
-    yield history, docs_html, output_query, output_language,
+    yield history, docs_html, output_query, output_language, related_contents, graphs_html
 
 
 def save_feedback(feed: str, user_id):
@@ -416,7 +417,7 @@ with gr.Blocks(title="Climate Q&A", css_paths=os.getcwd()+ "/style.css", theme=t
                 samples.append(group_examples)
 
 
-            with gr.Tab("Sources",elem_id = "tab-sources",id = 1):
+            with gr.Tab("Sources",elem_id = "tab-sources",id = 1) as tab_sources:
                 sources_textbox = gr.HTML(show_label=False, elem_id="sources-textbox")
                 docs_textbox = gr.State("")
 
@@ -435,7 +436,7 @@ with gr.Blocks(title="Climate Q&A", css_paths=os.getcwd()+ "/style.css", theme=t
 
 
 
-            with gr.Tab("Papers",elem_id = "tab-citations",id = 5):
+            with gr.Tab("Papers",elem_id = "tab-citations",id = 5) as tab_papers:
                 btn_summary = gr.Button("Summary")
                 # Fenêtre simulée pour le Summary
                 with gr.Group(visible=False, elem_id="papers-summary-popup") as summary_popup:
@@ -698,28 +699,30 @@ with gr.Blocks(title="Climate Q&A", css_paths=os.getcwd()+ "/style.css", theme=t
         current_state = 1 - current_state
         return current_state
 
-    def update_sources_number_display(sources_textbox, figures_cards, current_graphs):
+    def update_sources_number_display(sources_textbox, figures_cards, current_graphs, papers_html):
         sources_number = sources_textbox.count("<h2>")
         figures_number = figures_cards.count("<h2>")
         graphs_number = current_graphs.count("<iframe")
+        papers_number = papers_html.count("<h2>")
         sources_notif_label = f"Sources ({sources_number})"
         figures_notif_label = f"Figures ({figures_number})"
         graphs_notif_label = f"Recommended content ({graphs_number})"
+        papers_notif_label = f"Papers ({papers_number})"
 
-        return gr.update(label = sources_notif_label), gr.update(label = figures_notif_label), gr.update(label = graphs_notif_label)
+        return gr.update(label = sources_notif_label), gr.update(label = figures_notif_label), gr.update(label = graphs_notif_label), gr.update(label = papers_notif_label)
 
     (textbox
         .submit(start_chat, [textbox,chatbot], [textbox,tabs,chatbot],queue = False,api_name = "start_chat_textbox")
         .then(chat, [textbox,chatbot,dropdown_audience, dropdown_sources,dropdown_reports, current_graphs] ,[chatbot,sources_textbox,output_query,output_language, sources_raw, current_graphs],concurrency_limit = 8,api_name = "chat_textbox")
         .then(finish_chat, None, [textbox],api_name = "finish_chat_textbox")
-        .then(update_sources_number_display, [sources_textbox, figures_cards, current_graphs],[tab_sources, tab_figures, tab_recommended_content] )
+        # .then(update_sources_number_display, [sources_textbox, figures_cards, current_graphs,papers_html],[tab_sources, tab_figures, tab_recommended_content, tab_papers] )
     )
 
     (examples_hidden
         .change(start_chat, [examples_hidden,chatbot], [textbox,tabs,chatbot],queue = False,api_name = "start_chat_examples")
         .then(chat, [examples_hidden,chatbot,dropdown_audience, dropdown_sources,dropdown_reports, current_graphs] ,[chatbot,sources_textbox,output_query,output_language, sources_raw, current_graphs],concurrency_limit = 8,api_name = "chat_textbox")
        .then(finish_chat, None, [textbox],api_name = "finish_chat_examples")
-        .then(update_sources_number_display, [sources_textbox, figures_cards, current_graphs],[tab_sources, tab_figures, tab_recommended_content] )
+        # .then(update_sources_number_display, [sources_textbox, figures_cards, current_graphs,papers_html],[tab_sources, tab_figures, tab_recommended_content, tab_papers] )
     )
 
 
@@ -733,9 +736,10 @@ with gr.Blocks(title="Climate Q&A", css_paths=os.getcwd()+ "/style.css", theme=t
     sources_raw.change(process_figures, inputs=[sources_raw], outputs=[figures_cards, gallery_component])
 
 
-    sources_textbox.change(update_sources_number_display, [sources_textbox, figures_cards, current_graphs],[tab_sources, tab_figures, tab_recommended_content])
-    figures_cards.change(update_sources_number_display, [sources_textbox, figures_cards, current_graphs],[tab_sources, tab_figures, tab_recommended_content])
-    current_graphs.change(update_sources_number_display, [sources_textbox, figures_cards, current_graphs],[tab_sources, tab_figures, tab_recommended_content])
+    sources_textbox.change(update_sources_number_display, [sources_textbox, figures_cards, current_graphs,papers_html],[tab_sources, tab_figures, tab_recommended_content, tab_papers])
+    figures_cards.change(update_sources_number_display, [sources_textbox, figures_cards, current_graphs,papers_html],[tab_sources, tab_figures, tab_recommended_content, tab_papers])
+    current_graphs.change(update_sources_number_display, [sources_textbox, figures_cards, current_graphs,papers_html],[tab_sources, tab_figures, tab_recommended_content, tab_papers])
+    papers_html.change(update_sources_number_display, [sources_textbox, figures_cards, current_graphs,papers_html],[tab_sources, tab_figures, tab_recommended_content, tab_papers])
 
     dropdown_samples.change(change_sample_questions,dropdown_samples,samples)
 
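For context on the app.py hunks above: update_sources_number_display now also counts papers and relabels a fourth tab, and it is driven by .change() listeners on each HTML pane rather than being chained after chat. A minimal, self-contained sketch of that pattern, assuming Gradio's gr.update(label=...) applied to gr.Tab outputs (as app.py does) and using placeholder components rather than the real ClimateQ&A ones:

import gradio as gr

def update_sources_number_display(sources_html, figures_html, graphs_html, papers_html):
    # Count one entry per <h2> (or <iframe> for graphs) in each HTML pane
    # and push the counts into the tab labels.
    return (
        gr.update(label=f"Sources ({sources_html.count('<h2>')})"),
        gr.update(label=f"Figures ({figures_html.count('<h2>')})"),
        gr.update(label=f"Recommended content ({graphs_html.count('<iframe')})"),
        gr.update(label=f"Papers ({papers_html.count('<h2>')})"),
    )

with gr.Blocks() as demo:
    with gr.Tab("Sources") as tab_sources:
        sources_textbox = gr.HTML("<h2>Doc A</h2><h2>Doc B</h2>")
    with gr.Tab("Figures") as tab_figures:
        figures_cards = gr.HTML("")
    with gr.Tab("Recommended content") as tab_recommended_content:
        current_graphs = gr.HTML("")
    with gr.Tab("Papers") as tab_papers:
        papers_html = gr.HTML("")

    # Relabel all four tabs whenever any of the HTML panes changes,
    # mirroring the four .change() listeners added in this commit.
    for pane in (sources_textbox, figures_cards, current_graphs, papers_html):
        pane.change(
            update_sources_number_display,
            [sources_textbox, figures_cards, current_graphs, papers_html],
            [tab_sources, tab_figures, tab_recommended_content, tab_papers],
        )

if __name__ == "__main__":
    demo.launch()

A change to any one pane relabels all four tabs, which matches the four listeners registered at the bottom of the app.py diff.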
climateqa/event_handler.py
@@ -40,10 +40,12 @@ def handle_retrieved_documents(event: StreamEvent, history : list[ChatMessage],
 
         docs_html = "".join(docs_html)
 
+        related_contents = event["data"]["output"]["related_contents"]
+
     except Exception as e:
         print(f"Error getting documents: {e}")
         print(event)
-    return docs, docs_html, history, used_documents
+    return docs, docs_html, history, used_documents, related_contents
 
 def stream_answer(history: list[ChatMessage], event : StreamEvent, start_streaming : bool, answer_message_content : str)-> tuple[list[ChatMessage], bool, str]:
     """
style.css
@@ -242,6 +242,11 @@ label.selected{
 
 } */
 
+div#tab-citations{
+    height:calc(100vh - 190px) !important;
+    overflow-y: auto !important;
+}
+
 div#tab-examples{
     height:calc(100vh - 190px) !important;
     overflow-y: scroll !important;