timeki committed
Commit 89a69e6 • Parents: 6edd6c2, 4ab6519

Merge branch 'feature/add_steps_display' into feature/graph_recommandation

Files changed (4)
  1. app.py +177 -187
  2. sandbox/20240310 - CQA - Semantic Routing 1.ipynb +0 -0
  3. style.css +18 -2
  4. test.json +0 -0
app.py CHANGED

@@ -15,6 +15,8 @@ import time
 import re
 import json
 
+from gradio import ChatMessage
+
 # from gradio_modal import Modal
 
 from io import BytesIO
@@ -121,10 +123,8 @@ async def chat(query,history,audience,sources,reports,current_graphs):
     reports = []
 
     inputs = {"user_input": query,"audience": audience_prompt,"sources":sources}
-    print(f"\n\nInputs:\n {inputs}\n\n")
-    result = agent.astream_events(inputs,version = "v1") #{"callbacks":[MyCustomAsyncHandler()]})
-    # result = rag_chain.stream(inputs)
-
+    result = agent.astream_events(inputs,version = "v1")
+
     # path_reformulation = "/logs/reformulation/final_output"
     # path_keywords = "/logs/keywords/final_output"
     # path_retriever = "/logs/find_documents/final_output"
@@ -146,130 +146,166 @@ async def chat(query,history,audience,sources,reports,current_graphs):
         "transform_query":("🔄️ Thinking step by step to answer the question",True),
         "retrieve_documents":("🔄️ Searching in the knowledge base",False),
     }
-
+
+    used_documents = []
+    answer_message_content = ""
     try:
         async for event in result:
 
-            if event["event"] == "on_chat_model_stream" and event["metadata"]["langgraph_node"] in ["answer_rag", "answer_rag_no_docs", "answer_chitchat", "answer_ai_impact"]:
-                if start_streaming == False:
-                    start_streaming = True
-                    history[-1] = (query,"")
-
-                new_token = event["data"]["chunk"].content
-                # time.sleep(0.01)
-                previous_answer = history[-1][1]
-                previous_answer = previous_answer if previous_answer is not None else ""
-                answer_yet = previous_answer + new_token
-                answer_yet = parse_output_llm_with_sources(answer_yet)
-                history[-1] = (query,answer_yet)
-
-            if docs_used is True and event["metadata"]["langgraph_node"] in ["answer_rag_no_docs", "answer_chitchat", "answer_ai_impact"]:
-                docs_used = False
+            # if event["event"] == "on_chat_model_stream" and event["metadata"]["langgraph_node"] in ["answer_rag", "answer_rag_no_docs", "answer_chitchat", "answer_ai_impact"]:
+            # if start_streaming == False:
+            # start_streaming = True
+            # history[-1] = (query,"")
+            if "langgraph_node" in event["metadata"]:
+                node = event["metadata"]["langgraph_node"]
+
+                if event["event"] == "on_chain_end" and event["name"] == "retrieve_documents" :# when documents are retrieved
+                    try:
+                        docs = event["data"]["output"]["documents"]
+                        docs_html = []
+                        for i, d in enumerate(docs, 1):
+                            docs_html.append(make_html_source(d, i))
+
+                        used_documents = used_documents + [d.metadata["name"] for d in docs]
+                        history[-1].content = "Adding sources :\n\n - " + "\n - ".join(np.unique(used_documents))
+
+                        docs_html = "".join(docs_html)
+
+                    except Exception as e:
+                        print(f"Error getting documents: {e}")
+                        print(event)
+
+                elif event["name"] in steps_display.keys() and event["event"] == "on_chain_start": #display steps
+                    event_description,display_output = steps_display[node]
+                    if not hasattr(history[-1], 'metadata') or history[-1].metadata["title"] != event_description: # if a new step begins
+                        history.append(ChatMessage(role="assistant", content = "", metadata={'title' :event_description}))
+
+                elif event["name"] != "transform_query" and event["event"] == "on_chat_model_stream" and node in ["answer_rag", "answer_search"]:# if streaming answer
+                    if start_streaming == False:
+                        start_streaming = True
+                        history.append(ChatMessage(role="assistant", content = ""))
+                    answer_message_content += event["data"]["chunk"].content
+                    answer_message_content = parse_output_llm_with_sources(answer_message_content)
+                    history[-1] = ChatMessage(role="assistant", content = answer_message_content)
+                    # history.append(ChatMessage(role="assistant", content = new_message_content))
+
+            # if docs_used is True and event["metadata"]["langgraph_node"] in ["answer_rag_no_docs", "answer_chitchat", "answer_ai_impact"]:
+            # docs_used = False
 
-            elif docs_used is True and event["name"] == "retrieve_documents" and event["event"] == "on_chain_end":
-                try:
-                    docs = event["data"]["output"]["documents"]
-                    docs_html = []
-                    for i, d in enumerate(docs, 1):
-                        docs_html.append(make_html_source(d, i))
-                    docs_html = "".join(docs_html)
-
-                except Exception as e:
-                    print(f"Error getting documents: {e}")
-                    print(event)
-
-            # elif event["name"] == "retrieve_documents" and event["event"] == "on_chain_start":
-            # print(event)
-            # questions = event["data"]["input"]["questions"]
-            # questions = "\n".join([f"{i+1}. {q['question']} ({q['source']})" for i,q in enumerate(questions)])
-            # answer_yet = "🔄️ Searching in the knowledge base\n{questions}"
-            # history[-1] = (query,answer_yet)
-
-            elif event["name"] in ["retrieve_graphs", "retrieve_graphs_ai"] and event["event"] == "on_chain_end":
-                try:
-                    recommended_content = event["data"]["output"]["recommended_content"]
-                    # graphs = [
-                    # {
-                    # "embedding": x.metadata["returned_content"],
-                    # "metadata": {
-                    # "source": x.metadata["source"],
-                    # "category": x.metadata["category"]
-                    # }
-                    # } for x in recommended_content if x.metadata["source"] == "OWID"
-                    # ]
+            # elif docs_used is True and event["name"] == "retrieve_documents" and event["event"] == "on_chain_end":
+            # try:
+            # docs = event["data"]["output"]["documents"]
+            # docs_html = []
+            # for i, d in enumerate(docs, 1):
+            # docs_html.append(make_html_source(d, i))
+            # docs_html = "".join(docs_html)
+
+            # except Exception as e:
+            # print(f"Error getting documents: {e}")
+            # print(event)
+
+            # # elif event["name"] == "retrieve_documents" and event["event"] == "on_chain_start":
+            # # print(event)
+            # # questions = event["data"]["input"]["questions"]
+            # # questions = "\n".join([f"{i+1}. {q['question']} ({q['source']})" for i,q in enumerate(questions)])
+            # # answer_yet = "🔄️ Searching in the knowledge base\n{questions}"
+            # # history[-1] = (query,answer_yet)
+
+            # elif event["name"] in ["retrieve_graphs", "retrieve_graphs_ai"] and event["event"] == "on_chain_end":
+            # try:
+            # recommended_content = event["data"]["output"]["recommended_content"]
+            # # graphs = [
+            # # {
+            # # "embedding": x.metadata["returned_content"],
+            # # "metadata": {
+            # # "source": x.metadata["source"],
+            # # "category": x.metadata["category"]
+            # # }
+            # # } for x in recommended_content if x.metadata["source"] == "OWID"
+            # # ]
 
-                    unique_graphs = []
-                    seen_embeddings = set()
+            # unique_graphs = []
+            # seen_embeddings = set()
 
-                    for x in recommended_content:
-                        embedding = x.metadata["returned_content"]
+            # for x in recommended_content:
+            # embedding = x.metadata["returned_content"]
 
-                        # Check if the embedding has already been seen
-                        if embedding not in seen_embeddings:
-                            unique_graphs.append({
-                                "embedding": embedding,
-                                "metadata": {
-                                    "source": x.metadata["source"],
-                                    "category": x.metadata["category"]
-                                }
-                            })
-                            # Add the embedding to the seen set
-                            seen_embeddings.add(embedding)
+            # # Check if the embedding has already been seen
+            # if embedding not in seen_embeddings:
+            # unique_graphs.append({
+            # "embedding": embedding,
+            # "metadata": {
+            # "source": x.metadata["source"],
+            # "category": x.metadata["category"]
+            # }
+            # })
+            # # Add the embedding to the seen set
+            # seen_embeddings.add(embedding)
 
 
-                    categories = {}
-                    for graph in unique_graphs:
-                        category = graph['metadata']['category']
-                        if category not in categories:
-                            categories[category] = []
-                        categories[category].append(graph['embedding'])
-
-                    # graphs_html = ""
-                    for category, embeddings in categories.items():
-                        # graphs_html += f"<h3>{category}</h3>"
-                        # current_graphs.append(f"<h3>{category}</h3>")
-                        for embedding in embeddings:
-                            current_graphs.append([embedding, category])
-                            # graphs_html += f"<div>{embedding}</div>"
+            # categories = {}
+            # for graph in unique_graphs:
+            # category = graph['metadata']['category']
+            # if category not in categories:
+            # categories[category] = []
+            # categories[category].append(graph['embedding'])
+
+            # # graphs_html = ""
+            # for category, embeddings in categories.items():
+            # # graphs_html += f"<h3>{category}</h3>"
+            # # current_graphs.append(f"<h3>{category}</h3>")
+            # for embedding in embeddings:
+            # current_graphs.append([embedding, category])
+            # # graphs_html += f"<div>{embedding}</div>"
 
-                except Exception as e:
-                    print(f"Error getting graphs: {e}")
-
-            for event_name,(event_description,display_output) in steps_display.items():
-                if event["name"] == event_name:
-                    if event["event"] == "on_chain_start":
-                        # answer_yet = f"<p><span class='loader'></span>{event_description}</p>"
-                        # answer_yet = make_toolbox(event_description, "", checked = False)
-                        answer_yet = event_description
-
-                        history[-1] = (query,answer_yet)
-                    # elif event["event"] == "on_chain_end":
-                    # answer_yet = ""
-                    # history[-1] = (query,answer_yet)
-                    # if display_output:
-                    # print(event["data"]["output"])
-
-            # if op['path'] == path_reformulation: # reforulated question
-            # try:
-            # output_language = op['value']["language"] # str
-            # output_query = op["value"]["question"]
             # except Exception as e:
-            # raise gr.Error(f"ClimateQ&A Error: {e} - The error has been noted, try another question and if the error remains, you can contact us :)")
+            # print(f"Error getting graphs: {e}")
+
+            # for event_name,(event_description,display_output) in steps_display.items():
+            # if event["name"] == event_name:
+            # if event["event"] == "on_chain_start":
+            # # answer_yet = f"<p><span class='loader'></span>{event_description}</p>"
+            # # answer_yet = make_toolbox(event_description, "", checked = False)
+            # answer_yet = event_description
+
+            # history[-1] = (query,answer_yet)
+            # # elif event["event"] == "on_chain_end":
+            # # answer_yet = ""
+            # # history[-1] = (query,answer_yet)
+            # # if display_output:
+            # # print(event["data"]["output"])
+
+            # # if op['path'] == path_reformulation: # reforulated question
+            # # try:
+            # # output_language = op['value']["language"] # str
+            # # output_query = op["value"]["question"]
+            # # except Exception as e:
+            # # raise gr.Error(f"ClimateQ&A Error: {e} - The error has been noted, try another question and if the error remains, you can contact us :)")
 
-            # if op["path"] == path_keywords:
-            # try:
-            # output_keywords = op['value']["keywords"] # str
-            # output_keywords = " AND ".join(output_keywords)
-            # except Exception as e:
-            # pass
+            # # if op["path"] == path_keywords:
+            # # try:
+            # # output_keywords = op['value']["keywords"] # str
+            # # output_keywords = " AND ".join(output_keywords)
+            # # except Exception as e:
+            # # pass
 
 
 
-            history = [tuple(x) for x in history]
-            yield history,docs_html,output_query,output_language,gallery,current_graphs #,output_query,output_keywords
+            # history = [tuple(x) for x in history]
+            # yield history,docs_html,output_query,output_language,gallery,current_graphs #,output_query,output_keywords
 
 
+            if event["name"] == "transform_query" and event["event"] =="on_chain_end":
+                if hasattr(history[-1],"content"):
+                    history[-1].content += "Decompose question into sub-questions: \n\n - " + "\n - ".join([q["question"] for q in event["data"]["output"]["remaining_questions"]])
+
+            if event["name"] == "categorize_intent" and event["event"] == "on_chain_start":
+                print("X")
+
+            yield history,docs_html,output_query,output_language,gallery #,output_query,output_keywords
+
     except Exception as e:
+        print(event, "has failed")
         raise gr.Error(f"{e}")
 
 
@@ -330,23 +366,24 @@ async def chat(query,history,audience,sources,reports,current_graphs):
             history[-1] = (history[-1][0],answer_yet)
             history = [tuple(x) for x in history]
 
-    print(f"\n\nImages:\n{gallery}")
+    # print(f"\n\nImages:\n{gallery}")
 
-    # gallery = [x.metadata["image_path"] for x in docs if (len(x.metadata["image_path"]) > 0 and "IAS" in x.metadata["image_path"])]
-    # if len(gallery) > 0:
-    # gallery = list(set("|".join(gallery).split("|")))
-    # gallery = [get_image_from_azure_blob_storage(x) for x in gallery]
+    # # gallery = [x.metadata["image_path"] for x in docs if (len(x.metadata["image_path"]) > 0 and "IAS" in x.metadata["image_path"])]
+    # # if len(gallery) > 0:
+    # # gallery = list(set("|".join(gallery).split("|")))
+    # # gallery = [get_image_from_azure_blob_storage(x) for x in gallery]
 
-    yield history,docs_html,output_query,output_language,gallery,current_graphs #,output_query,output_keywords
+    # yield history,docs_html,output_query,output_language,gallery,current_graphs #,output_query,output_keywords
 
 
 
-    # else:
-    # docs_string = "No relevant passages found in the climate science reports (IPCC and IPBES)"
-    # complete_response = "**No relevant passages found in the climate science reports (IPCC and IPBES), you may want to ask a more specific question (specifying your question on climate issues).**"
-    # messages.append({"role": "assistant", "content": complete_response})
-    # gradio_format = make_pairs([a["content"] for a in messages[1:]])
-    # yield gradio_format, messages, docs_string
+    # # else:
+    # # docs_string = "No relevant passages found in the climate science reports (IPCC and IPBES)"
+    # # complete_response = "**No relevant passages found in the climate science reports (IPCC and IPBES), you may want to ask a more specific question (specifying your question on climate issues).**"
+    # # messages.append({"role": "assistant", "content": complete_response})
+    # # gradio_format = make_pairs([a["content"] for a in messages[1:]])
+    # # yield gradio_format, messages, docs_string
+    yield history,docs_html,output_query,output_language,gallery#,output_query,output_keywords
 
 
 def save_feedback(feed: str, user_id):
@@ -392,56 +429,6 @@ papers_cols_widths = {
 papers_cols = list(papers_cols_widths.keys())
 papers_cols_widths = list(papers_cols_widths.values())
 
-# async def find_papers(query, keywords,after):
-
-# summary = ""
-
-# df_works = oa.search(keywords,after = after)
-# df_works = df_works.dropna(subset=["abstract"])
-# df_works = oa.rerank(query,df_works,reranker)
-# df_works = df_works.sort_values("rerank_score",ascending=False)
-# G = oa.make_network(df_works)
-
-# height = "750px"
-# network = oa.show_network(G,color_by = "rerank_score",notebook=False,height = height)
-# network_html = network.generate_html()
-
-# network_html = network_html.replace("'", "\"")
-# css_to_inject = "<style>#mynetwork { border: none !important; } .card { border: none !important; }</style>"
-# network_html = network_html + css_to_inject
-
-
-# network_html = f"""<iframe style="width: 100%; height: {height};margin:0 auto" name="result" allow="midi; geolocation; microphone; camera;
-# display-capture; encrypted-media;" sandbox="allow-modals allow-forms
-# allow-scripts allow-same-origin allow-popups
-# allow-top-navigation-by-user-activation allow-downloads" allowfullscreen=""
-# allowpaymentrequest="" frameborder="0" srcdoc='{network_html}'></iframe>"""
-
-
-# docs = df_works["content"].head(15).tolist()
-
-# df_works = df_works.reset_index(drop = True).reset_index().rename(columns = {"index":"doc"})
-# df_works["doc"] = df_works["doc"] + 1
-# df_works = df_works[papers_cols]
-
-# yield df_works,network_html,summary
-
-# chain = make_rag_papers_chain(llm)
-# result = chain.astream_log({"question": query,"docs": docs,"language":"English"})
-# path_answer = "/logs/StrOutputParser/streamed_output/-"
-
-# async for op in result:
-
-# op = op.ops[0]
-
-# if op['path'] == path_answer: # reforulated question
-# new_token = op['value'] # str
-# summary += new_token
-# else:
-# continue
-# yield df_works,network_html,summary
-
-
 
 # --------------------------------------------------------------------
 # Gradio
@@ -478,23 +465,28 @@ def save_graph(saved_graphs_state, embedding, category):
     return saved_graphs_state, gr.Button("Graph Saved")
 
 
-with gr.Blocks(title="Climate Q&A", css="style.css", theme=theme,elem_id = "main-component") as demo:
-    user_id_state = gr.State([user_id])
+# with gr.Blocks(title="Climate Q&A", css="style.css", theme=theme,elem_id = "main-component") as demo:
+# user_id_state = gr.State([user_id])
 
-    chat_completed_state = gr.State(0)
-    current_graphs = gr.State([])
-    saved_graphs = gr.State({})
+# chat_completed_state = gr.State(0)
+# current_graphs = gr.State([])
+# saved_graphs = gr.State({})
+with gr.Blocks(title="Climate Q&A", css_paths=os.getcwd()+ "/style.css", theme=theme,elem_id = "main-component") as demo:
 
     with gr.Tab("ClimateQ&A"):
 
         with gr.Row(elem_id="chatbot-row"):
            with gr.Column(scale=2):
-                state = gr.State([system_template])
+                # state = gr.State([system_template])
                chatbot = gr.Chatbot(
-                    value=[(None,init_prompt)],
-                    show_copy_button=True,show_label = False,elem_id="chatbot",layout = "panel",
+                    value = [ChatMessage(role="assistant", content=init_prompt)],
+                    type = "messages",
+                    show_copy_button=True,
+                    show_label = False,
+                    elem_id="chatbot",
+                    layout = "panel",
                     avatar_images = (None,"https://i.ibb.co/YNyd5W2/logo4.png"),
-                )#,avatar_images = ("assets/logo4.png",None))
+                )
 
                # bot.like(vote,None,None)
 
@@ -502,8 +494,7 @@ with gr.Blocks(title="Climate Q&A", css="style.css", theme=theme,elem_id = "main-component") as demo:
 
                with gr.Row(elem_id = "input-message"):
                    textbox=gr.Textbox(placeholder="Ask me anything here!",show_label=False,scale=7,lines = 1,interactive = True,elem_id="input-textbox")
-                    # submit = gr.Button("",elem_id = "submit-button",scale = 1,interactive = True,icon = "https://static-00.iconduck.com/assets.00/settings-icon-2048x2046-cw28eevx.png")
-
+
 
 
        with gr.Column(scale=1, variant="panel",elem_id = "right-panel"):
@@ -685,8 +676,9 @@ with gr.Blocks(title="Climate Q&A", css="style.css", theme=theme,elem_id = "main-component") as demo:
 
 
    def start_chat(query,history):
-        history = history + [(query,None)]
-        history = [tuple(x) for x in history]
+        # history = history + [(query,None)]
+        # history = [tuple(x) for x in history]
+        history = history + [ChatMessage(role="user", content=query)]
        return (gr.update(interactive = False),gr.update(selected=1),history)
 
    def finish_chat():
@@ -722,8 +714,6 @@ with gr.Blocks(title="Climate Q&A", css="style.css", theme=theme,elem_id = "main-component") as demo:
 
    dropdown_samples.change(change_sample_questions,dropdown_samples,samples)
 
-    # query_papers.submit(generate_keywords,[query_papers], [keywords_papers])
-    # search_papers.click(find_papers,[query_papers,keywords_papers,after], [papers_dataframe,citations_network,papers_summary])
 
 demo.queue()
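Note on the pattern introduced above: the core of feature/add_steps_display is the switch from tuple-based chat history to gr.Chatbot(type = "messages") with gradio.ChatMessage, where a metadata={"title": ...} entry makes a message render as a collapsible intermediate step. The following is a minimal, self-contained sketch of that pattern only, not the app's code: it assumes a recent Gradio release that ships gradio.ChatMessage, and fake_agent_events() is a hypothetical stand-in for the LangGraph stream the real app gets from agent.astream_events(inputs, version = "v1").

# Minimal sketch of the steps-display pattern (illustration only, not app.py).
import gradio as gr
from gradio import ChatMessage

STEPS = {
    "transform_query": "🔄️ Thinking step by step to answer the question",
    "retrieve_documents": "🔄️ Searching in the knowledge base",
}

def fake_agent_events():
    # Hypothetical stand-in event stream: two intermediate steps, then a streamed answer.
    yield {"name": "transform_query", "event": "on_chain_start"}
    yield {"name": "retrieve_documents", "event": "on_chain_start"}
    for chunk in ["Climate ", "change ", "is ", "happening."]:
        yield {"name": "answer_rag", "event": "on_chat_model_stream", "chunk": chunk}

def chat(query, history):
    # Append the user turn, then stream assistant steps and the answer.
    history = history + [ChatMessage(role="user", content=query)]
    answer = ""
    for event in fake_agent_events():
        if event["event"] == "on_chain_start" and event["name"] in STEPS:
            # A message with metadata={"title": ...} renders as a collapsible step.
            history.append(ChatMessage(role="assistant", content="",
                                       metadata={"title": STEPS[event["name"]]}))
        elif event["event"] == "on_chat_model_stream":
            if answer == "":
                history.append(ChatMessage(role="assistant", content=""))
            answer += event["chunk"]
            history[-1] = ChatMessage(role="assistant", content=answer)
        yield history

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(type="messages")
    textbox = gr.Textbox(placeholder="Ask me anything here!", show_label=False)
    textbox.submit(chat, [textbox, chatbot], [chatbot])

demo.queue()
demo.launch()

The real event loop in app.py additionally tracks the LangGraph node name, lists retrieved source names inside the current step, and passes the streamed answer through parse_output_llm_with_sources before updating the history.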
 
sandbox/20240310 - CQA - Semantic Routing 1.ipynb CHANGED
The diff for this file is too large to render. See raw diff
 
style.css CHANGED

@@ -2,6 +2,14 @@
 /* :root {
     --user-image: url('https://ih1.redbubble.net/image.4776899543.6215/st,small,507x507-pad,600x600,f8f8f8.jpg');
 } */
+.avatar-container.svelte-1x5p6hu:not(.thumbnail-item) img {
+    width: 100%;
+    height: 100%;
+    object-fit: cover;
+    border-radius: 50%;
+    padding: 0px;
+    margin: 0px;
+}
 
 .warning-box {
     background-color: #fff3cd;
@@ -57,6 +65,7 @@ body.dark .tip-box * {
 
 .message{
     font-size:14px !important;
+
 }
 
 
@@ -65,6 +74,10 @@ a {
     color: inherit;
 }
 
+.doc-ref sup{
+    color:#dc2626!important;
+    /* margin-right:1px; */
+}
 .card {
     background-color: white;
     border-radius: 10px;
@@ -426,7 +439,7 @@ span.chatbot > p > img{
 
 .loader {
     border: 1px solid #d0d0d0 !important; /* Light grey background */
-    border-top: 1px solid #3498db !important; /* Blue color */
+    border-top: 1px solid #db3434 !important; /* Blue color */
     border-right: 1px solid #3498db !important; /* Blue color */
     border-radius: 50%;
    width: 20px;
@@ -492,4 +505,7 @@ div#tab-saved-graphs {
         max-height: 50vh; /* Reduce height for smaller screens */
         overflow-y: auto;
     }
-}
+}
+.message-buttons-left.panel.message-buttons.with-avatar {
+    display: none;
+}
test.json ADDED
File added with no content to display (+0 -0)