add message types

Changed files:
- app.py (+38, -19)
- sandbox/20240310 - CQA - Semantic Routing 1.ipynb (+0, -0)

app.py CHANGED
@@ -15,6 +15,8 @@ import time
 import re
 import json
 
+from gradio import ChatMessage
+
 # from gradio_modal import Modal
 
 from io import BytesIO
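For context on the new import: gradio.ChatMessage is a small dataclass with role, content, and optional metadata fields, used together with a Chatbot in "messages" mode. A minimal sketch of how it behaves (the values are illustrative, not from the app):

from gradio import ChatMessage

# A plain assistant bubble.
msg = ChatMessage(role="assistant", content="Hello!")

# A metadata {"title": ...} makes Chatbot(type="messages") render the
# entry as a collapsible intermediate step instead of a normal bubble.
step = ChatMessage(role="assistant", content="",
                   metadata={"title": "🔄️ Retrieving documents"})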
@@ -141,21 +143,29 @@ async def chat(query,history,audience,sources,reports):
 
     try:
         async for event in result:
-            if event["event"] == "on_chat_model_stream":
+            if event["event"] == "on_chat_model_stream" and event["name"] != "transform_query": # if streaming answer
                 if start_streaming == False:
                     start_streaming = True
-                    history[-1] = (query,"")
-
-                new_token = event["data"]["chunk"].content
-                # time.sleep(0.01)
-                previous_answer = history[-1][1]
-                previous_answer = previous_answer if previous_answer is not None else ""
-                answer_yet = previous_answer + new_token
-                answer_yet = parse_output_llm_with_sources(answer_yet)
-                history[-1] = (query,answer_yet)
+                    # history[-1] = (query,"")
+                    history.append(ChatMessage(role="assistant", content = ""))
+                if type(history[-1].metadata) != dict :
+                    # print("metadata : ", history[-1].metadata)
+                    # history.append(ChatMessage(role="assistant", content = ""))
+
+                    last_message_content = history[-1].content
+                    last_message_content += event["data"]["chunk"].content
+                    last_message_content = parse_output_llm_with_sources(last_message_content)
+                    history[-1] = ChatMessage(role="assistant", content = last_message_content)
+                # new_token = event["data"]["chunk"].content
+                # # time.sleep(0.01)
+                # previous_answer = history[-1][1]
+                # previous_answer = previous_answer if previous_answer is not None else ""
+                # answer_yet = previous_answer + new_token
+                # answer_yet = parse_output_llm_with_sources(answer_yet)
+                # history[-1] = (query,answer_yet)
 
 
-            elif event["name"] == "retrieve_documents" and event["event"] == "on_chain_end":
+            elif event["name"] == "retrieve_documents" and event["event"] == "on_chain_end": # when documents are retrieved
                 try:
                     docs = event["data"]["output"]["documents"]
                     docs_html = []
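The new branch streams tokens by appending one empty assistant ChatMessage at the start of the stream and then rebuilding it as chunks arrive. A condensed sketch of that pattern, assuming the same "a dict metadata marks a tool step" convention as the diff's type(...) != dict test (the helper name and its call site are assumptions, not the app's code):

from gradio import ChatMessage

def append_token(history: list, token: str) -> None:
    """Accumulate one streamed token into the last assistant message."""
    # Tool steps carry a dict metadata ({"title": ...}); if the last entry
    # is one of those, open a fresh plain bubble for the answer text.
    if not history or isinstance(history[-1].metadata, dict):
        history.append(ChatMessage(role="assistant", content=""))
    # ChatMessage is a dataclass, so rebuild it with the grown content.
    history[-1] = ChatMessage(role="assistant",
                              content=history[-1].content + token)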
@@ -173,14 +183,17 @@ async def chat(query,history,audience,sources,reports):
                     # answer_yet = "🔄️ Searching in the knowledge base\n{questions}"
                     # history[-1] = (query,answer_yet)
 
-
-            for event_name,(event_description,display_output) in steps_display.items():
+            # TODO: append a tool step with the questions used for the search
+            for event_name,(event_description,display_output) in steps_display.items(): # display steps
                 if event["name"] == event_name:
                     if event["event"] == "on_chain_start":
                         # answer_yet = f"<p><span class='loader'></span>{event_description}</p>"
                         # answer_yet = make_toolbox(event_description, "", checked = False)
                         answer_yet = event_description
-                        history[-1] = (query,answer_yet)
+                        # answer_yet = ChatMessage(role="assistant", content = "processing", metadata={'title' :event_description})
+
+                        history.append(ChatMessage(role="assistant", content = "", metadata={'title' :event_description}))
+                        # history[-1] = (query,answer_yet)
                     # elif event["event"] == "on_chain_end":
                     # answer_yet = ""
                     # history[-1] = (query,answer_yet)
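With this change, each pipeline step lands in the history as its own collapsible entry rather than overwriting the answer text. A hedged sketch of the idea, with a hypothetical steps_display table in the same shape as the app's (event name mapped to a description and a display flag):

from gradio import ChatMessage

# Hypothetical step table; the real keys and labels live in app.py.
steps_display = {
    "retrieve_documents": ("🔄️ Searching in the knowledge base", True),
}

def show_step(history: list, event_name: str) -> None:
    """Append a fold-out 'tool' entry for a pipeline step that just started."""
    if event_name in steps_display:
        description, _display_output = steps_display[event_name]
        # The metadata title is what turns this entry into a collapsible
        # step in a Chatbot running with type="messages".
        history.append(ChatMessage(role="assistant", content="",
                                   metadata={"title": description}))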
@@ -203,7 +216,7 @@ async def chat(query,history,audience,sources,reports):
 
 
 
-            history = [tuple(x) for x in history]
+            # history = [tuple(x) for x in history]
             yield history,docs_html,output_query,output_language,gallery,output_query,output_keywords
 
    except Exception as e:
@@ -415,8 +428,13 @@ with gr.Blocks(title="Climate Q&A", css="style.css", theme=theme,elem_id = "main
         with gr.Column(scale=2):
             # state = gr.State([system_template])
             chatbot = gr.Chatbot(
-                value=[(None,init_prompt)],
-                show_copy_button=True,show_label = False,elem_id="chatbot",layout = "panel",
+                # value=[(None,init_prompt)],
+                value = [ChatMessage(role="assistant", content=init_prompt)],
+                type = "messages",
+                show_copy_button=True,
+                show_label = False,
+                elem_id="chatbot",
+                layout = "panel",
                 avatar_images = (None,"https://i.ibb.co/YNyd5W2/logo4.png"),
             )#,avatar_images = ("assets/logo4.png",None))
 
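Taken together, the component change amounts to the following self-contained sketch: type="messages" switches the Chatbot history from (user, bot) tuples to ChatMessage/dict entries (the init prompt text here is a stand-in, not the app's):

import gradio as gr
from gradio import ChatMessage

init_prompt = "Hello! Ask me anything about climate change."  # stand-in text

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(
        value=[ChatMessage(role="assistant", content=init_prompt)],
        type="messages",          # message-typed history, not tuples
        show_copy_button=True,
        show_label=False,
        elem_id="chatbot",
        layout="panel",
    )

demo.launch()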
@@ -534,8 +552,9 @@ with gr.Blocks(title="Climate Q&A", css="style.css", theme=theme,elem_id = "main
 
 
     def start_chat(query,history):
-        history = history + [(query,None)]
-        history = [tuple(x) for x in history]
+        # history = history + [(query,None)]
+        # history = [tuple(x) for x in history]
+        history = history + [ChatMessage(role="user", content=query)]
         return (gr.update(interactive = False),gr.update(selected=1),history)
 
     def finish_chat():
sandbox/20240310 - CQA - Semantic Routing 1.ipynb CHANGED
The diff for this file is too large to render. See raw diff.